/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
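
/* A minimal usage sketch (editor's illustration, not part of the original
   file): small constants come shared out of const_int_rtx and larger ones
   are hash-consed above, so pointer equality is a valid identity test for
   CONST_INTs; gen_int_mode is the safe way to obtain a constant already
   truncated to a mode's width.  Compiled out; the values are arbitrary.  */
#if 0
rtx a = GEN_INT (42);
rtx b = gen_rtx_CONST_INT (VOIDmode, 42);
gcc_assert (a == b);	/* Both are the shared const_int_rtx copy.  */

/* 0x123456789 does not fit in SImode; trunc_int_for_mode keeps the low
   32 bits (and sign-extends them), so this yields (const_int 0x23456789).  */
rtx c = gen_int_mode ((HOST_WIDE_INT) 0x123456789, SImode);
#endif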

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
	of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 are the same),
	then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
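
/* Worked example (editor's illustration, not part of the original file):
   assuming a 64-bit HOST_WIDE_INT, the three cases above play out as
   follows.  Compiled out.  */
#if 0
/* Case 1: SImode is narrower than HOST_WIDE_INT, so gen_int_mode is used.  */
rtx x1 = immed_double_const (5, 0, SImode);	/* (const_int 5) */

/* Case 2: TImode is double-width, but i1 is just the sign extension of
   i0, so a plain CONST_INT suffices.  */
rtx x2 = immed_double_const (-1, -1, TImode);	/* (const_int -1) */

/* Case 3: the high word carries real information, so a VOIDmode
   CONST_DOUBLE is built and hash-consed via lookup_const_double.  */
rtx x3 = immed_double_const (0, 1, TImode);
#endif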

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
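
/* Usage sketch (editor's illustration, not part of the original file):
   the constructors above differ only in which flags and alias set they
   preset on the fresh MEM.  The base register chosen here is arbitrary.  */
#if 0
rtx addr = gen_rtx_REG (Pmode, 1);
rtx m1 = gen_rtx_MEM (SImode, addr);	/* Plain MEM, attributes cleared.  */
rtx m2 = gen_const_mem (SImode, addr);	/* Read-only, cannot trap.  */
rtx m3 = gen_frame_mem (SImode, addr);	/* Frame alias set, cannot trap.  */
#endif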

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
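
/* Examples (editor's illustration, not part of the original file) of how
   the rules above classify subregs, assuming 4-byte UNITS_PER_WORD and
   word_mode == SImode:

     (subreg:SI (reg:DI) 0)   ok: aligned lowpart
     (subreg:SI (reg:DI) 3)   rejected: offset not a multiple of osize
     (subreg:SI (reg:SF) 4)   rejected: offset outside the inner object
     (subreg:SI (reg:DF) 0)   ok, but only via the word_mode escape hatch
     (subreg:DI (reg:DF) 0)   ok: float modes involved, but sizes match  */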

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
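
/* Usage sketch (editor's illustration, not part of the original file):
   rtvecs mostly feed vector-carrying codes such as PARALLEL.  The SETs
   built here are arbitrary placeholders.  */
#if 0
rtx set1 = gen_rtx_SET (VOIDmode, gen_reg_rtx (SImode), const0_rtx);
rtx set2 = gen_rtx_SET (VOIDmode, gen_reg_rtx (SImode), const1_rtx);
rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));

rtx vec[2] = { set1, set2 };
rtx par2 = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (2, vec));
#endif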

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
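
/* Worked example (editor's illustration, not part of the original file):
   for outer_mode = QImode over inner_mode = SImode (4 bytes), the lowpart
   byte is at offset 0 on a little-endian target and at offset 3 when
   BYTES_BIG_ENDIAN.  For the paradoxical direction, outer_mode = SImode
   over inner_mode = QImode, the same big-endian layout yields -3.  */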

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
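
/* Examples (editor's illustration, not part of the original file) of the
   common cases handled above, on a little-endian target:

     gen_lowpart_common (SImode, (reg:DI 100))
       => (subreg:SI (reg:DI 100) 0)
     gen_lowpart_common (QImode, (sign_extend:SI (reg:QI 101)))
       => (reg:QI 101), the extension is simply peeled off
     gen_lowpart_common (SImode, (mem:DI ...))
       => 0, MEMs are one of the cases this routine does not handle  */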

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
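
/* Worked example (editor's illustration, not part of the original file):
   for outermode = SImode, innermode = DImode and 4-byte words,
   difference = 4, so:

     subreg_lowpart_offset  => 0 on little-endian, 4 with WORDS_BIG_ENDIAN
     subreg_highpart_offset => 4 on little-endian, 0 with WORDS_BIG_ENDIAN

   The two functions always pick out opposite halves of the value.  */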

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	   || !CONST_INT_P (MEM_OFFSET (mem))
	   || (MAX (MEM_ALIGN (mem),
		    get_object_alignment (MEM_EXPR (mem), align))
	       < align))
	 return -1;
       else
	 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

6926c713 1534/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1535 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1536 if we are making a new object of this type. BITPOS is nonzero if
1537 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1538
1539void
502b8322
AJ
1540set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1541 HOST_WIDE_INT bitpos)
173b24b9 1542{
268f7033
UW
1543 alias_set_type alias;
1544 tree expr = NULL;
1545 rtx offset = NULL_RTX;
1546 rtx size = NULL_RTX;
1547 unsigned int align = BITS_PER_UNIT;
6f1087be 1548 HOST_WIDE_INT apply_bitpos = 0;
173b24b9
RK
1549 tree type;
1550
1551 /* It can happen that type_for_mode was given a mode for which there
1552 is no language-level type. In which case it returns NULL, which
1553 we can see here. */
1554 if (t == NULL_TREE)
1555 return;
1556
1557 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1558 if (type == error_mark_node)
1559 return;
173b24b9 1560
173b24b9
RK
1561 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1562 wrong answer, as it assumes that DECL_RTL already has the right alias
1563 info. Callers should not set DECL_RTL until after the call to
1564 set_mem_attributes. */
5b0264cb 1565 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1566
738cc472 1567 /* Get the alias set from the expression or type (perhaps using a
8ac61af7
RK
1568 front-end routine) and use it. */
1569 alias = get_alias_set (t);
173b24b9 1570
a5e9c810 1571 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
07cb6e8c
JM
1572 MEM_IN_STRUCT_P (ref)
1573 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
f8ad8d7c 1574 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1575
8ac61af7
RK
1576 /* If we are making an object of this type, or if this is a DECL, we know
1577 that it is a scalar if the type is not an aggregate. */
07cb6e8c
JM
1578 if ((objectp || DECL_P (t))
1579 && ! AGGREGATE_TYPE_P (type)
1580 && TREE_CODE (type) != COMPLEX_TYPE)
173b24b9
RK
1581 MEM_SCALAR_P (ref) = 1;
1582
268f7033
UW
1583 /* Default values from pre-existing memory attributes if present. */
1584 if (MEM_ATTRS (ref))
1585 {
1586 /* ??? Can this ever happen? Calling this routine on a MEM that
1587 already carries memory attributes should probably be invalid. */
1588 expr = MEM_EXPR (ref);
1589 offset = MEM_OFFSET (ref);
1590 size = MEM_SIZE (ref);
1591 align = MEM_ALIGN (ref);
1592 }
1593
1594 /* Otherwise, default values from the mode of the MEM reference. */
1595 else if (GET_MODE (ref) != BLKmode)
1596 {
1597 /* Respect mode size. */
1598 size = GEN_INT (GET_MODE_SIZE (GET_MODE (ref)));
1599 /* ??? Is this really necessary? We probably should always get
1600 the size from the type below. */
1601
1602 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1603 if T is an object, always compute the object alignment below. */
1604 if (STRICT_ALIGNMENT && TYPE_P (t))
1605 align = GET_MODE_ALIGNMENT (GET_MODE (ref));
1606 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1607 e.g. if the type carries an alignment attribute. Should we be
1608 able to simply always use TYPE_ALIGN? */
1609 }
1610
1611 /* We can set the alignment from the type if we are making an object,
1612 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1613 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1614 align = MAX (align, TYPE_ALIGN (type));
1615
1616 else if (TREE_CODE (t) == MEM_REF)
1617 {
1618 tree op0 = TREE_OPERAND (t, 0);
1619 if (TREE_CODE (op0) == ADDR_EXPR
1620 && (DECL_P (TREE_OPERAND (op0, 0))
1621 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1622 {
1623 if (DECL_P (TREE_OPERAND (op0, 0)))
1624 align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1625 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1626 {
1627 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1628 #ifdef CONSTANT_ALIGNMENT
1629 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
1630 #endif
1631 }
1632 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1633 {
1634 unsigned HOST_WIDE_INT ioff
1635 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1636 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1637 align = MIN (aoff, align);
1638 }
1639 }
1640 else
1641 /* ??? This isn't fully correct, we can't set the alignment from the
1642 type in all cases. */
1643 align = MAX (align, TYPE_ALIGN (type));
1644 }
1645
1646 else if (TREE_CODE (t) == TARGET_MEM_REF)
1647 /* ??? This isn't fully correct, we can't set the alignment from the
1648 type in all cases. */
1649 align = MAX (align, TYPE_ALIGN (type));
1650
1651 /* If the size is known, we can set that. */
1652 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1653 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1654
1655 /* If T is not a type, we may be able to deduce some more information about
1656 the expression. */
1657 if (! TYPE_P (t))
1658 {
1659 tree base;
1660 bool align_computed = false;
1661
1662 if (TREE_THIS_VOLATILE (t))
1663 MEM_VOLATILE_P (ref) = 1;
1664
1665 /* Now remove any conversions: they don't change what the underlying
1666 object is. Likewise for SAVE_EXPR. */
1667 while (CONVERT_EXPR_P (t)
1668 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1669 || TREE_CODE (t) == SAVE_EXPR)
1670 t = TREE_OPERAND (t, 0);
1671
1672 /* We may look through structure-like accesses for the purposes of
1673 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1674 base = t;
1675 while (TREE_CODE (base) == COMPONENT_REF
1676 || TREE_CODE (base) == REALPART_EXPR
1677 || TREE_CODE (base) == IMAGPART_EXPR
1678 || TREE_CODE (base) == BIT_FIELD_REF)
1679 base = TREE_OPERAND (base, 0);
1680
1681 if (TREE_CODE (base) == MEM_REF
1682 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1683 base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1684 if (DECL_P (base))
1685 {
1686 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1687 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1688 else
1689 MEM_NOTRAP_P (ref) = 1;
1690 }
1691 else if (TREE_CODE (base) == INDIRECT_REF
1692 || TREE_CODE (base) == MEM_REF
1693 || TREE_CODE (base) == TARGET_MEM_REF
1694 || TREE_CODE (base) == ARRAY_REF
1695 || TREE_CODE (base) == ARRAY_RANGE_REF)
1696 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1697
1698 base = get_base_address (base);
1699 if (base && DECL_P (base)
1700 && TREE_READONLY (base)
1701 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1702 && !TREE_THIS_VOLATILE (base))
1703 MEM_READONLY_P (ref) = 1;
1704
1705 /* If this expression uses its parent's alias set, mark it such
1706 that we won't change it. */
1707 if (component_uses_parent_alias_set (t))
1708 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1709
1710 /* If this is a decl, set the attributes of the MEM from it. */
1711 if (DECL_P (t))
1712 {
1713 expr = t;
1714 offset = const0_rtx;
1715 apply_bitpos = bitpos;
1716 size = (DECL_SIZE_UNIT (t)
1717 && host_integerp (DECL_SIZE_UNIT (t), 1)
1718 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1719 align = DECL_ALIGN (t);
1720 align_computed = true;
1721 }
1722
1723 /* If this is a constant, we know the alignment. */
1724 else if (CONSTANT_CLASS_P (t))
1725 {
1726 align = TYPE_ALIGN (type);
1727#ifdef CONSTANT_ALIGNMENT
1728 align = CONSTANT_ALIGNMENT (t, align);
1729#endif
1730 align_computed = true;
1731 }
1732
1733 /* If this is a field reference and not a bit-field, record it. */
1734 /* ??? There is some information that can be gleaned from bit-fields,
1735 such as the word offset in the structure that might be modified.
1736 But skip it for now. */
1737 else if (TREE_CODE (t) == COMPONENT_REF
1738 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1739 {
1740 expr = t;
1741 offset = const0_rtx;
1742 apply_bitpos = bitpos;
1743 /* ??? Any reason the field size would be different than
1744 the size we got from the type? */
1745 }
1746
1747 /* If this is an array reference, look for an outer field reference. */
1748 else if (TREE_CODE (t) == ARRAY_REF)
1749 {
1750 tree off_tree = size_zero_node;
1751 /* We can't modify t, because we use it at the end of the
1752 function. */
1753 tree t2 = t;
1754
1755 do
1756 {
1757 tree index = TREE_OPERAND (t2, 1);
1758 tree low_bound = array_ref_low_bound (t2);
1759 tree unit_size = array_ref_element_size (t2);
1760
1761 /* We assume all arrays have sizes that are a multiple of a byte.
1762 First subtract the lower bound, if any, in the type of the
1763 index, then convert to sizetype and multiply by the size of
1764 the array element. */
1765 if (! integer_zerop (low_bound))
1766 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1767 index, low_bound);
1768
1769 off_tree = size_binop (PLUS_EXPR,
1770 size_binop (MULT_EXPR,
1771 fold_convert (sizetype,
1772 index),
1773 unit_size),
1774 off_tree);
1775 t2 = TREE_OPERAND (t2, 0);
1776 }
1777 while (TREE_CODE (t2) == ARRAY_REF);
1778
1779 if (DECL_P (t2))
1780 {
1781 expr = t2;
1782 offset = NULL;
1783 if (host_integerp (off_tree, 1))
1784 {
1785 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1786 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1787 align = DECL_ALIGN (t2);
1788 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1789 align = aoff;
1790 align_computed = true;
1791 offset = GEN_INT (ioff);
1792 apply_bitpos = bitpos;
1793 }
1794 }
1795 else if (TREE_CODE (t2) == COMPONENT_REF)
1796 {
1797 expr = t2;
1798 offset = NULL;
1799 if (host_integerp (off_tree, 1))
1800 {
1801 offset = GEN_INT (tree_low_cst (off_tree, 1));
1802 apply_bitpos = bitpos;
1803 }
1804 /* ??? Any reason the field size would be different than
1805 the size we got from the type? */
1806 }
1807
1808 /* If this is an indirect reference, record it. */
1809 else if (TREE_CODE (t) == MEM_REF)
1810 {
1811 expr = t;
1812 offset = const0_rtx;
1813 apply_bitpos = bitpos;
1814 }
1815 }
1816
1817 /* If this is an indirect reference, record it. */
1818 else if (TREE_CODE (t) == MEM_REF
1819 || TREE_CODE (t) == TARGET_MEM_REF)
1820 {
1821 expr = t;
1822 offset = const0_rtx;
1823 apply_bitpos = bitpos;
1824 }
1825
1826 if (!align_computed && !INDIRECT_REF_P (t))
1827 {
1828 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
1829 align = MAX (align, obj_align);
1830 }
1831 }
1832
1833 /* If we modified OFFSET based on T, then subtract the outstanding
1834 bit position offset. Similarly, increase the size of the accessed
1835 object to contain the negative offset. */
1836 if (apply_bitpos)
1837 {
1838 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1839 if (size)
1840 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1841 }
1842
1843 /* Now set the attributes we computed above. */
1844 MEM_ATTRS (ref)
1845 = get_mem_attrs (alias, expr, offset, size, align,
1846 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1847
1848 /* If this is already known to be a scalar or aggregate, we are done. */
1849 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1850 return;
1851
1852 /* If it is a reference into an aggregate, this is part of an aggregate.
1853 Otherwise we don't know. */
1854 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1855 || TREE_CODE (t) == ARRAY_RANGE_REF
1856 || TREE_CODE (t) == BIT_FIELD_REF)
1857 MEM_IN_STRUCT_P (ref) = 1;
1858}
1859
1860 void
1861 set_mem_attributes (rtx ref, tree t, int objectp)
1862{
1863 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1864}
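/* Usage sketch (illustrative annotation, not original source): after
   expanding a decl DECL to memory at address ADDR, a caller would
   typically write

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);

   so that the alias set, alignment, size and MEM_EXPR of MEM are
   derived from the tree instead of being left at mode defaults.  */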
1865
1866/* Set the alias set of MEM to SET. */
1867
1868void
1869 set_mem_alias_set (rtx mem, alias_set_type set)
1870 {
1871 /* If the new and old alias sets don't conflict, something is wrong. */
1872 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1873
1874 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1875 MEM_SIZE (mem), MEM_ALIGN (mem),
1876 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1877}
1878
1879/* Set the address space of MEM to ADDRSPACE (target-defined). */
1880
1881void
1882set_mem_addr_space (rtx mem, addr_space_t addrspace)
1883{
1884 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1885 MEM_OFFSET (mem), MEM_SIZE (mem),
1886 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1887 }
1888
1889 /* Set the alignment of MEM to ALIGN bits. */
1890
1891void
1892 set_mem_align (rtx mem, unsigned int align)
1893 {
1894 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1895 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1896 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1897 }
1898
1899 /* Set the expr for MEM to EXPR. */
1900
1901void
1902 set_mem_expr (rtx mem, tree expr)
1903{
1904 MEM_ATTRS (mem)
1905 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1906 MEM_SIZE (mem), MEM_ALIGN (mem),
1907 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1908 }
1909
1910/* Set the offset of MEM to OFFSET. */
1911
1912void
1913 set_mem_offset (rtx mem, rtx offset)
1914{
1915 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1916 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1917 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1918}
1919
1920/* Set the size of MEM to SIZE. */
1921
1922void
1923 set_mem_size (rtx mem, rtx size)
1924{
1925 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1926 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1927 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1928 }
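/* Usage sketch (illustrative annotation, not original source): the
   setters above all funnel through get_mem_attrs, so each call updates
   one field and preserves the rest.  For example, to describe a
   16-byte reference known to be 4-byte aligned:

     set_mem_align (mem, 32);
     set_mem_size (mem, GEN_INT (16));

   set_mem_align takes a bit count, set_mem_size an rtx byte count.  */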
1929\f
1930/* Return a memory reference like MEMREF, but with its mode changed to MODE
1931 and its address changed to ADDR. (VOIDmode means don't change the mode.
1932 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1933 returned memory location is required to be valid. The memory
1934 attributes are not changed. */
1935
1936 static rtx
1937 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1938 {
1939 addr_space_t as;
1940 rtx new_rtx;
1941
1942 gcc_assert (MEM_P (memref));
1943 as = MEM_ADDR_SPACE (memref);
1944 if (mode == VOIDmode)
1945 mode = GET_MODE (memref);
1946 if (addr == 0)
1947 addr = XEXP (memref, 0);
1948 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1949 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1950 return memref;
1951
1952 if (validate)
1953 {
1954 if (reload_in_progress || reload_completed)
1955 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1956 else
1957 addr = memory_address_addr_space (mode, addr, as);
1958 }
1959
1960 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1961 return memref;
1962
1963 new_rtx = gen_rtx_MEM (mode, addr);
1964 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1965 return new_rtx;
1966 }
1967
1968/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1969 way we are changing MEMREF, so we only preserve the alias set. */
1970
1971rtx
1972 change_address (rtx memref, enum machine_mode mode, rtx addr)
1973 {
1974 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1975 enum machine_mode mmode = GET_MODE (new_rtx);
1976 unsigned int align;
1977
1978 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1979 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1980
1981 /* If there are no changes, just return the original memory reference. */
1982 if (new_rtx == memref)
1983 {
1984 if (MEM_ATTRS (memref) == 0
1985 || (MEM_EXPR (memref) == NULL
1986 && MEM_OFFSET (memref) == NULL
1987 && MEM_SIZE (memref) == size
1988 && MEM_ALIGN (memref) == align))
1989 return new_rtx;
1990
1991 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1992 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1993 }
1994
1995 MEM_ATTRS (new_rtx)
1996 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1997 MEM_ADDR_SPACE (memref), mmode);
1998
1999 return new_rtx;
2000 }
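/* Usage sketch (illustrative annotation, not original source): to
   reinterpret a BLKmode reference as a word-sized access at the same
   address one can write

     rtx word = change_address (blk_mem, word_mode, NULL_RTX);

   Passing NULL_RTX keeps the original address; only the mode-derived
   size and alignment attributes are recomputed.  */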
2001
2002/* Return a memory reference like MEMREF, but with its mode changed
2003 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2004 nonzero, the memory address is forced to be valid.
2005 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2006 and caller is responsible for adjusting MEMREF base register. */
2007
2008rtx
2009adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2010 int validate, int adjust)
2011 {
2012 rtx addr = XEXP (memref, 0);
2013 rtx new_rtx;
2014 rtx memoffset = MEM_OFFSET (memref);
2015 rtx size = 0;
2016 unsigned int memalign = MEM_ALIGN (memref);
2017 addr_space_t as = MEM_ADDR_SPACE (memref);
2018 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2019 int pbits;
2020
2021 /* If there are no changes, just return the original memory reference. */
2022 if (mode == GET_MODE (memref) && !offset
2023 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2024 return memref;
2025
2026 /* ??? Prefer to create garbage instead of creating shared rtl.
2027 This may happen even if offset is nonzero -- consider
2028 (plus (plus reg reg) const_int) -- so do this always. */
2029 addr = copy_rtx (addr);
2030
2031 /* Convert a possibly large offset to a signed value within the
2032 range of the target address space. */
2033 pbits = GET_MODE_BITSIZE (address_mode);
2034 if (HOST_BITS_PER_WIDE_INT > pbits)
2035 {
2036 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2037 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2038 >> shift);
2039 }
2040
2041 if (adjust)
2042 {
2043 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2044 object, we can merge it into the LO_SUM. */
2045 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2046 && offset >= 0
2047 && (unsigned HOST_WIDE_INT) offset
2048 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2049 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2050 plus_constant (XEXP (addr, 1), offset));
2051 else
2052 addr = plus_constant (addr, offset);
2053 }
2054
2055 new_rtx = change_address_1 (memref, mode, addr, validate);
2056
2057 /* If the address is a REG, change_address_1 rightfully returns memref,
2058 but this would destroy memref's MEM_ATTRS. */
2059 if (new_rtx == memref && offset != 0)
2060 new_rtx = copy_rtx (new_rtx);
2061
2062 /* Compute the new values of the memory attributes due to this adjustment.
2063 We add the offsets and update the alignment. */
2064 if (memoffset)
2065 memoffset = GEN_INT (offset + INTVAL (memoffset));
2066
2067 /* Compute the new alignment by taking the MIN of the alignment and the
2068 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2069 is zero. */
2070 if (offset != 0)
2071 memalign
2072 = MIN (memalign,
2073 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2074
2075 /* We can compute the size in a number of ways. */
2076 if (GET_MODE (new_rtx) != BLKmode)
2077 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2078 else if (MEM_SIZE (memref))
2079 size = plus_constant (MEM_SIZE (memref), -offset);
2080
2081 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2082 memoffset, size, memalign, as,
2083 GET_MODE (new_rtx));
2084
2085 /* At some point, we should validate that this offset is within the object,
2086 if all the appropriate values are known. */
2087 return new_rtx;
2088}
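/* Worked example (illustrative annotation, not original source): for
   OFFSET == 6, offset & -offset == 2, so the (offset != 0) case above
   caps MEMALIGN at 2 * BITS_PER_UNIT == 16 bits.  A 6-byte displacement
   can only promise 2-byte alignment, however aligned the original
   reference was.  */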
2089
2090/* Return a memory reference like MEMREF, but with its mode changed
2091 to MODE and its address changed to ADDR, which is assumed to be
2092 MEMREF offset by OFFSET bytes. If VALIDATE is
2093 nonzero, the memory address is forced to be valid. */
2094
2095rtx
2096adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2097 HOST_WIDE_INT offset, int validate)
2098{
2099 memref = change_address_1 (memref, VOIDmode, addr, validate);
2100 return adjust_address_1 (memref, mode, offset, validate, 0);
2101}
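/* Usage sketch (illustrative annotation, not original source): after
   rewriting an address into a register REG that holds ADDR + 4, a
   caller passes the known displacement so the attributes stay honest:

     mem = adjust_automodify_address_1 (mem, SImode, reg, 4, 1);

   The address is replaced wholesale while MEM_ATTRS are adjusted as if
   the reference had moved by 4 bytes.  */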
2102
2103/* Return a memory reference like MEMREF, but whose address is changed by
2104 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2105 known to be in OFFSET (possibly 1). */
2106
2107rtx
2108 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2109 {
2110 rtx new_rtx, addr = XEXP (memref, 0);
2111 addr_space_t as = MEM_ADDR_SPACE (memref);
2112 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2113
2114 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2115
2116 /* At this point we don't know _why_ the address is invalid. It
2117 could have secondary memory references, multiplies or anything.
2118
2119 However, if we did go and rearrange things, we can wind up not
2120 being able to recognize the magic around pic_offset_table_rtx.
2121 This stuff is fragile, and is yet another example of why it is
2122 bad to expose PIC machinery too early. */
2123 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2124 && GET_CODE (addr) == PLUS
2125 && XEXP (addr, 0) == pic_offset_table_rtx)
2126 {
2127 addr = force_reg (GET_MODE (addr), addr);
2128 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2129 }
2130
2131 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2132 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2133
2134 /* If there are no changes, just return the original memory reference. */
2135 if (new_rtx == memref)
2136 return new_rtx;
2137
2138 /* Update the alignment to reflect the offset. Reset the offset, which
2139 we don't know. */
2140 MEM_ATTRS (new_rtx)
2141 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2142 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2143 as, GET_MODE (new_rtx));
2144 return new_rtx;
2145 }
2146
2147/* Return a memory reference like MEMREF, but with its address changed to
2148 ADDR. The caller is asserting that the actual piece of memory pointed
2149 to is the same, just the form of the address is being changed, such as
2150 by putting something into a register. */
2151
2152rtx
2153 replace_equiv_address (rtx memref, rtx addr)
2154 {
2155 /* change_address_1 copies the memory attribute structure without change
2156 and that's exactly what we want here. */
2157 update_temp_slot_address (XEXP (memref, 0), addr);
2158 return change_address_1 (memref, VOIDmode, addr, 1);
2159 }
2160
2161/* Likewise, but the reference is not required to be valid. */
2162
2163rtx
2164 replace_equiv_address_nv (rtx memref, rtx addr)
2165 {
2166 return change_address_1 (memref, VOIDmode, addr, 0);
2167}
2168
2169/* Return a memory reference like MEMREF, but with its mode widened to
2170 MODE and offset by OFFSET. This would be used by targets that e.g.
2171 cannot issue QImode memory operations and have to use SImode memory
2172 operations plus masking logic. */
2173
2174rtx
2175 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2176 {
2177 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2178 tree expr = MEM_EXPR (new_rtx);
2179 rtx memoffset = MEM_OFFSET (new_rtx);
2180 unsigned int size = GET_MODE_SIZE (mode);
2181
2182 /* If there are no changes, just return the original memory reference. */
2183 if (new_rtx == memref)
2184 return new_rtx;
2185
2186 /* If we don't know what offset we were at within the expression, then
2187 we can't know if we've overstepped the bounds. */
2188 if (! memoffset)
2189 expr = NULL_TREE;
2190
2191 while (expr)
2192 {
2193 if (TREE_CODE (expr) == COMPONENT_REF)
2194 {
2195 tree field = TREE_OPERAND (expr, 1);
2196 tree offset = component_ref_field_offset (expr);
2197
2198 if (! DECL_SIZE_UNIT (field))
2199 {
2200 expr = NULL_TREE;
2201 break;
2202 }
2203
2204 /* Is the field at least as large as the access? If so, ok,
2205 otherwise strip back to the containing structure. */
2206 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2207 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2208 && INTVAL (memoffset) >= 0)
2209 break;
2210
2211 if (! host_integerp (offset, 1))
2212 {
2213 expr = NULL_TREE;
2214 break;
2215 }
2216
2217 expr = TREE_OPERAND (expr, 0);
2218 memoffset
2219 = (GEN_INT (INTVAL (memoffset)
2220 + tree_low_cst (offset, 1)
2221 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2222 / BITS_PER_UNIT)));
2223 }
2224 /* Similarly for the decl. */
2225 else if (DECL_P (expr)
2226 && DECL_SIZE_UNIT (expr)
2227 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2228 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2229 && (! memoffset || INTVAL (memoffset) >= 0))
2230 break;
2231 else
2232 {
2233 /* The widened memory access overflows the expression, which means
2234 that it could alias another expression. Zap it. */
2235 expr = NULL_TREE;
2236 break;
2237 }
2238 }
2239
2240 if (! expr)
2241 memoffset = NULL_RTX;
2242
2243 /* The widened memory may alias other stuff, so zap the alias set. */
2244 /* ??? Maybe use get_alias_set on any remaining expression. */
2245
2246 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2247 MEM_ALIGN (new_rtx),
2248 MEM_ADDR_SPACE (new_rtx), mode);
2249
2250 return new_rtx;
2251 }
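/* Usage sketch (illustrative annotation, not original source): a target
   that can only issue SImode loads might widen a byte reference:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   If the wider access could overstep the underlying field or decl, the
   loop above deliberately drops MEM_EXPR and the alias set rather than
   keep attributes that no longer cover the access.  */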
2252\f
2253/* A fake decl that is used as the MEM_EXPR of spill slots. */
2254static GTY(()) tree spill_slot_decl;
2255
2256tree
2257get_spill_slot_decl (bool force_build_p)
2258{
2259 tree d = spill_slot_decl;
2260 rtx rd;
2261
2262 if (d || !force_build_p)
2263 return d;
2264
2265 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2266 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2267 DECL_ARTIFICIAL (d) = 1;
2268 DECL_IGNORED_P (d) = 1;
2269 TREE_USED (d) = 1;
2270 spill_slot_decl = d;
2271
2272 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2273 MEM_NOTRAP_P (rd) = 1;
2274 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2275 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2276 SET_DECL_RTL (d, rd);
2277
2278 return d;
2279}
2280
2281/* Given MEM, a result from assign_stack_local, fill in the memory
2282 attributes as appropriate for a register allocator spill slot.
2283 These slots are not aliasable by other memory. We arrange for
2284 them all to use a single MEM_EXPR, so that the aliasing code can
2285 work properly in the case of shared spill slots. */
2286
2287void
2288set_mem_attrs_for_spill (rtx mem)
2289{
2290 alias_set_type alias;
2291 rtx addr, offset;
2292 tree expr;
2293
2294 expr = get_spill_slot_decl (true);
2295 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2296
2297 /* We expect the incoming memory to be of the form:
2298 (mem:MODE (plus (reg sfp) (const_int offset)))
2299 with perhaps the plus missing for offset = 0. */
2300 addr = XEXP (mem, 0);
2301 offset = const0_rtx;
2302 if (GET_CODE (addr) == PLUS
2303 && CONST_INT_P (XEXP (addr, 1)))
2304 offset = XEXP (addr, 1);
2305
2306 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2307 MEM_SIZE (mem), MEM_ALIGN (mem),
2308 ADDR_SPACE_GENERIC, GET_MODE (mem));
2309 MEM_NOTRAP_P (mem) = 1;
2310}
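/* Usage sketch (illustrative annotation, not original source): a spill
   slot of mode MODE might be set up as

     rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
     set_mem_attrs_for_spill (slot);

   after which every spill slot shares the "%sfp" MEM_EXPR created by
   get_spill_slot_decl, keeping the alias machinery consistent.  */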
2311\f
2312/* Return a newly created CODE_LABEL rtx with a unique label number. */
2313
2314rtx
2315 gen_label_rtx (void)
2316 {
2317 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2318 NULL, label_num++, NULL);
2319}
2320\f
2321/* For procedure integration. */
2322
2323 /* Install new pointers to the first and last insns in the chain.
2324 Also, set cur_insn_uid to one higher than the last in use.
2325 Used for an inline-procedure after copying the insn chain. */
2326
2327void
2328 set_new_first_and_last_insn (rtx first, rtx last)
2329 {
2330 rtx insn;
2331
2332 set_first_insn (first);
2333 set_last_insn (last);
2334 cur_insn_uid = 0;
2335
2336 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2337 {
2338 int debug_count = 0;
2339
2340 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2341 cur_debug_insn_uid = 0;
2342
2343 for (insn = first; insn; insn = NEXT_INSN (insn))
2344 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2345 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2346 else
2347 {
2348 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2349 if (DEBUG_INSN_P (insn))
2350 debug_count++;
2351 }
2352
2353 if (debug_count)
2354 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2355 else
2356 cur_debug_insn_uid++;
2357 }
2358 else
2359 for (insn = first; insn; insn = NEXT_INSN (insn))
2360 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2361
2362 cur_insn_uid++;
2363 }
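/* Illustrative note (not original source): under the
   MIN_NONDEBUG_INSN_UID partitioning, debug insns receive UIDs below
   MIN_NONDEBUG_INSN_UID and all other insns UIDs at or above it, which
   is why the loop above can classify an insn by comparing
   INSN_UID (insn) alone.  */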
2364\f
2365 /* Go through all the RTL insn bodies and copy any invalid shared
2366 structure. This routine should only be called once. */
2367
2368 static void
2369 unshare_all_rtl_1 (rtx insn)
2370 {
2371 /* Unshare just about everything else. */
2372 unshare_all_rtl_in_chain (insn);
2373
2374 /* Make sure the addresses of stack slots found outside the insn chain
2375 (such as, in DECL_RTL of a variable) are not shared
2376 with the insn chain.
2377
2378 This special care is necessary when the stack slot MEM does not
2379 actually appear in the insn chain. If it does appear, its address
2380 is unshared from all else at that point. */
2381 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2382}
2383
2384 /* Go through all the RTL insn bodies and copy any invalid shared
2385 structure, again. This is a fairly expensive thing to do so it
2386 should be done sparingly. */
2387
2388void
2389 unshare_all_rtl_again (rtx insn)
2390{
2391 rtx p;
2392 tree decl;
2393
2394 for (p = insn; p; p = NEXT_INSN (p))
2395 if (INSN_P (p))
2396 {
2397 reset_used_flags (PATTERN (p));
2398 reset_used_flags (REG_NOTES (p));
2399 }
2400
2401 /* Make sure that virtual stack slots are not shared. */
2402 set_used_decls (DECL_INITIAL (cfun->decl));
2403
2404 /* Make sure that virtual parameters are not shared. */
2405 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2406 set_used_flags (DECL_RTL (decl));
2407
2408 reset_used_flags (stack_slot_list);
2409
2410 unshare_all_rtl_1 (insn);
2411}
2412
2413 unsigned int
2414unshare_all_rtl (void)
2415{
2416 unshare_all_rtl_1 (get_insns ());
2417 return 0;
2418}
2419
2420 struct rtl_opt_pass pass_unshare_all_rtl =
2421 {
2422 {
2423 RTL_PASS,
2424 "unshare", /* name */
2425 NULL, /* gate */
2426 unshare_all_rtl, /* execute */
2427 NULL, /* sub */
2428 NULL, /* next */
2429 0, /* static_pass_number */
2430 TV_NONE, /* tv_id */
2431 0, /* properties_required */
2432 0, /* properties_provided */
2433 0, /* properties_destroyed */
2434 0, /* todo_flags_start */
2435 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2436 }
2437};
2438
2439
2440 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2441 Recursively does the same for subexpressions. */
2442
2443static void
2444verify_rtx_sharing (rtx orig, rtx insn)
2445{
2446 rtx x = orig;
2447 int i;
2448 enum rtx_code code;
2449 const char *format_ptr;
2450
2451 if (x == 0)
2452 return;
2453
2454 code = GET_CODE (x);
2455
2456 /* These types may be freely shared. */
2457
2458 switch (code)
2459 {
2460 case REG:
2461 case DEBUG_EXPR:
2462 case VALUE:
2463 case CONST_INT:
2464 case CONST_DOUBLE:
2465 case CONST_FIXED:
2466 case CONST_VECTOR:
2467 case SYMBOL_REF:
2468 case LABEL_REF:
2469 case CODE_LABEL:
2470 case PC:
2471 case CC0:
2472 case SCRATCH:
2473 return;
2474 /* SCRATCH must be shared because they represent distinct values. */
2475 case CLOBBER:
2476 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2477 return;
2478 break;
2479
2480 case CONST:
2481 if (shared_const_p (orig))
2482 return;
2483 break;
2484
2485 case MEM:
2486 /* A MEM is allowed to be shared if its address is constant. */
2487 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2488 || reload_completed || reload_in_progress)
2489 return;
2490
2491 break;
2492
2493 default:
2494 break;
2495 }
2496
2497 /* This rtx may not be shared. If it has already been seen,
2498 replace it with a copy of itself. */
2499 #ifdef ENABLE_CHECKING
2500 if (RTX_FLAG (x, used))
2501 {
2502 error ("invalid rtl sharing found in the insn");
2503 debug_rtx (insn);
2504 error ("shared rtx");
2505 debug_rtx (x);
2506 internal_error ("internal consistency failure");
2507 }
2508#endif
2509 gcc_assert (!RTX_FLAG (x, used));
2510
2511 RTX_FLAG (x, used) = 1;
2512
2513 /* Now scan the subexpressions recursively. */
2514
2515 format_ptr = GET_RTX_FORMAT (code);
2516
2517 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2518 {
2519 switch (*format_ptr++)
2520 {
2521 case 'e':
2522 verify_rtx_sharing (XEXP (x, i), insn);
2523 break;
2524
2525 case 'E':
2526 if (XVEC (x, i) != NULL)
2527 {
2528 int j;
2529 int len = XVECLEN (x, i);
2530
2531 for (j = 0; j < len; j++)
2532 {
2533 /* We allow sharing of ASM_OPERANDS inside single
2534 instruction. */
2c07f13b 2535 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2536 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2537 == ASM_OPERANDS))
2c07f13b
JH
2538 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2539 else
2540 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2541 }
2542 }
2543 break;
2544 }
2545 }
2546 return;
2547}
2548
2549 /* Go through all the RTL insn bodies and check that there is no unexpected
2550 sharing in between the subexpressions. */
2551
2552 DEBUG_FUNCTION void
2553verify_rtl_sharing (void)
2554{
2555 rtx p;
2556
2557 timevar_push (TV_VERIFY_RTL_SHARING);
2558
2559 for (p = get_insns (); p; p = NEXT_INSN (p))
2560 if (INSN_P (p))
2561 {
2562 reset_used_flags (PATTERN (p));
2563 reset_used_flags (REG_NOTES (p));
2564 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2565 {
2566 int i;
2567 rtx q, sequence = PATTERN (p);
2568
2569 for (i = 0; i < XVECLEN (sequence, 0); i++)
2570 {
2571 q = XVECEXP (sequence, 0, i);
2572 gcc_assert (INSN_P (q));
2573 reset_used_flags (PATTERN (q));
2574 reset_used_flags (REG_NOTES (q));
2954a813
KK
2575 }
2576 }
2577 }
2578
2579 for (p = get_insns (); p; p = NEXT_INSN (p))
2580 if (INSN_P (p))
2581 {
2582 verify_rtx_sharing (PATTERN (p), p);
2583 verify_rtx_sharing (REG_NOTES (p), p);
2584 }
2585
2586 timevar_pop (TV_VERIFY_RTL_SHARING);
2587}
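/* Usage sketch (illustrative annotation, not original source): passes
   normally request this check by setting TODO_verify_rtl_sharing in
   their todo_flags_finish, as pass_unshare_all_rtl does above; a pass
   that rewrites RTL by hand can also call it directly:

     verify_rtl_sharing ();

   Any rtx reachable twice outside the allowed cases stops the compiler
   with an "invalid rtl sharing" internal error.  */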
2588
2589/* Go through all the RTL insn bodies and copy any invalid shared structure.
2590 Assumes the mark bits are cleared at entry. */
2591
2592void
2593unshare_all_rtl_in_chain (rtx insn)
2594{
2595 for (; insn; insn = NEXT_INSN (insn))
2596 if (INSN_P (insn))
2597 {
2598 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2599 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2600 }
2601}
2602
2603 /* Go through all virtual stack slots of a function and mark them as
2604 shared. We never replace the DECL_RTLs themselves with a copy,
2605 but expressions mentioned into a DECL_RTL cannot be shared with
2606 expressions in the instruction stream.
2607
2608 Note that reload may convert pseudo registers into memories in-place.
2609 Pseudo registers are always shared, but MEMs never are. Thus if we
2610 reset the used flags on MEMs in the instruction stream, we must set
2611 them again on MEMs that appear in DECL_RTLs. */
2612
2613 static void
2614 set_used_decls (tree blk)
2615{
2616 tree t;
2617
2618 /* Mark decls. */
2619 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2620 if (DECL_RTL_SET_P (t))
2621 set_used_flags (DECL_RTL (t));
2622
2623 /* Now process sub-blocks. */
2624 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2625 set_used_decls (t);
2626}
2627
23b2ce53 2628/* Mark ORIG as in use, and return a copy of it if it was already in use.
2629 Recursively does the same for subexpressions. Uses
2630 copy_rtx_if_shared_1 to reduce stack space. */
2631
2632rtx
2633 copy_rtx_if_shared (rtx orig)
2634 {
2635 copy_rtx_if_shared_1 (&orig);
2636 return orig;
2637}
2638
2639/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2640 use. Recursively does the same for subexpressions. */
2641
2642static void
2643copy_rtx_if_shared_1 (rtx *orig1)
2644{
2645 rtx x;
2646 int i;
2647 enum rtx_code code;
2648 rtx *last_ptr;
2649 const char *format_ptr;
2650 int copied = 0;
2651 int length;
2652
2653 /* Repeat is used to turn tail-recursion into iteration. */
2654repeat:
2655 x = *orig1;
2656
2657 if (x == 0)
2658 return;
2659
2660 code = GET_CODE (x);
2661
2662 /* These types may be freely shared. */
2663
2664 switch (code)
2665 {
2666 case REG:
2667 case DEBUG_EXPR:
2668 case VALUE:
2669 case CONST_INT:
2670 case CONST_DOUBLE:
2671 case CONST_FIXED:
2672 case CONST_VECTOR:
2673 case SYMBOL_REF:
2674 case LABEL_REF:
RS
2675 case CODE_LABEL:
2676 case PC:
2677 case CC0:
2678 case SCRATCH:
2679 /* SCRATCH must be shared because they represent distinct values. */
2680 return;
2681 case CLOBBER:
2682 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2683 return;
2684 break;
2685
2686 case CONST:
2687 if (shared_const_p (x))
2688 return;
2689 break;
2690
2691 case DEBUG_INSN:
2692 case INSN:
2693 case JUMP_INSN:
2694 case CALL_INSN:
2695 case NOTE:
2696 case BARRIER:
2697 /* The chain of insns is not being copied. */
32b32b16 2698 return;
2699
2700 default:
2701 break;
2702 }
2703
2704 /* This rtx may not be shared. If it has already been seen,
2705 replace it with a copy of itself. */
2706
2707 if (RTX_FLAG (x, used))
2708 {
2709 x = shallow_copy_rtx (x);
2710 copied = 1;
2711 }
2712 RTX_FLAG (x, used) = 1;
2713
2714 /* Now scan the subexpressions recursively.
2715 We can store any replaced subexpressions directly into X
2716 since we know X is not shared! Any vectors in X
2717 must be copied if X was copied. */
2718
2719 format_ptr = GET_RTX_FORMAT (code);
2720 length = GET_RTX_LENGTH (code);
2721 last_ptr = NULL;
2722
2723 for (i = 0; i < length; i++)
2724 {
2725 switch (*format_ptr++)
2726 {
2727 case 'e':
2728 if (last_ptr)
2729 copy_rtx_if_shared_1 (last_ptr);
2730 last_ptr = &XEXP (x, i);
2731 break;
2732
2733 case 'E':
2734 if (XVEC (x, i) != NULL)
2735 {
2736 int j;
2737 int len = XVECLEN (x, i);
2738
2739 /* Copy the vector iff I copied the rtx and the length
2740 is nonzero. */
2741 if (copied && len > 0)
2742 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2743
2744 /* Call recursively on all inside the vector. */
2745 for (j = 0; j < len; j++)
2746 {
2747 if (last_ptr)
2748 copy_rtx_if_shared_1 (last_ptr);
2749 last_ptr = &XVECEXP (x, i, j);
2750 }
2751 }
2752 break;
2753 }
2754 }
2755 *orig1 = x;
2756 if (last_ptr)
2757 {
2758 orig1 = last_ptr;
2759 goto repeat;
2760 }
2761 return;
2762}
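/* Illustrative note (not original source): the last_ptr bookkeeping and
   the goto above are manual tail-call elimination.  For an expression
   that nests in its final operand, e.g.
   (plus r1 (plus r2 (plus r3 r4))), the walk iterates instead of
   recursing, so the C stack no longer grows with nesting depth in that
   position.  */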
2763
2764 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2765
2766static void
2767mark_used_flags (rtx x, int flag)
2768 {
2769 int i, j;
2770 enum rtx_code code;
2771 const char *format_ptr;
2772 int length;
2773
2774 /* Repeat is used to turn tail-recursion into iteration. */
2775repeat:
2776 if (x == 0)
2777 return;
2778
2779 code = GET_CODE (x);
2780
2781 /* These types may be freely shared so we needn't do any resetting
2782 for them. */
2783
2784 switch (code)
2785 {
2786 case REG:
2787 case DEBUG_EXPR:
2788 case VALUE:
2789 case CONST_INT:
2790 case CONST_DOUBLE:
2791 case CONST_FIXED:
2792 case CONST_VECTOR:
2793 case SYMBOL_REF:
2794 case CODE_LABEL:
2795 case PC:
2796 case CC0:
2797 return;
2798
2799 case DEBUG_INSN:
2800 case INSN:
2801 case JUMP_INSN:
2802 case CALL_INSN:
2803 case NOTE:
2804 case LABEL_REF:
2805 case BARRIER:
2806 /* The chain of insns is not being copied. */
2807 return;
2808
2809 default:
2810 break;
2811 }
2812
2813 RTX_FLAG (x, used) = flag;
2814
2815 format_ptr = GET_RTX_FORMAT (code);
2816 length = GET_RTX_LENGTH (code);
2817
2818 for (i = 0; i < length; i++)
2819 {
2820 switch (*format_ptr++)
2821 {
2822 case 'e':
2823 if (i == length-1)
2824 {
2825 x = XEXP (x, i);
2826 goto repeat;
2827 }
2828 mark_used_flags (XEXP (x, i), flag);
2829 break;
2830
2831 case 'E':
2832 for (j = 0; j < XVECLEN (x, i); j++)
2833 mark_used_flags (XVECEXP (x, i, j), flag);
2834 break;
2835 }
2836 }
2837}
2838
2839 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2840 to look for shared sub-parts. */
2841
2842void
2843 reset_used_flags (rtx x)
2844 {
2845 mark_used_flags (x, 0);
2846}
2847
2848/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2849 to look for shared sub-parts. */
2850
2851void
2852set_used_flags (rtx x)
2853{
2854 mark_used_flags (x, 1);
2855 }
2856\f
2857/* Copy X if necessary so that it won't be altered by changes in OTHER.
2858 Return X or the rtx for the pseudo reg the value of X was copied into.
2859 OTHER must be valid as a SET_DEST. */
2860
2861rtx
2862 make_safe_from (rtx x, rtx other)
2863{
2864 while (1)
2865 switch (GET_CODE (other))
2866 {
2867 case SUBREG:
2868 other = SUBREG_REG (other);
2869 break;
2870 case STRICT_LOW_PART:
2871 case SIGN_EXTEND:
2872 case ZERO_EXTEND:
2873 other = XEXP (other, 0);
2874 break;
2875 default:
2876 goto done;
2877 }
2878 done:
2879 if ((MEM_P (other)
2880 && ! CONSTANT_P (x)
2881 && !REG_P (x)
2882 && GET_CODE (x) != SUBREG)
2883 || (REG_P (other)
2884 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2885 || reg_mentioned_p (other, x))))
2886 {
2887 rtx temp = gen_reg_rtx (GET_MODE (x));
2888 emit_move_insn (temp, x);
2889 return temp;
2890 }
2891 return x;
2892}
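/* Usage sketch (illustrative annotation, not original source): before
   emitting code that stores into TARGET early, a caller protects an
   input that might overlap it:

     x = make_safe_from (x, target);

   If X could be clobbered by writing TARGET, X is first copied into a
   fresh pseudo via emit_move_insn, and that pseudo is returned.  */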
2893\f
2894/* Emission of insns (adding them to the doubly-linked list). */
2895
2896/* Return the last insn emitted, even if it is in a sequence now pushed. */
2897
2898rtx
2899 get_last_insn_anywhere (void)
2900{
2901 struct sequence_stack *stack;
2902 if (get_last_insn ())
2903 return get_last_insn ();
2904 for (stack = seq_stack; stack; stack = stack->next)
2905 if (stack->last != 0)
2906 return stack->last;
2907 return 0;
2908}
2909
2910/* Return the first nonnote insn emitted in current sequence or current
2911 function. This routine looks inside SEQUENCEs. */
2912
2913rtx
2914 get_first_nonnote_insn (void)
2915 {
2916 rtx insn = get_insns ();
2917
2918 if (insn)
2919 {
2920 if (NOTE_P (insn))
2921 for (insn = next_insn (insn);
2922 insn && NOTE_P (insn);
2923 insn = next_insn (insn))
2924 continue;
2925 else
2926 {
2927 if (NONJUMP_INSN_P (insn)
2928 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2929 insn = XVECEXP (PATTERN (insn), 0, 0);
2930 }
2931 }
2932
2933 return insn;
2934}
2935
2936/* Return the last nonnote insn emitted in current sequence or current
2937 function. This routine looks inside SEQUENCEs. */
2938
2939rtx
2940 get_last_nonnote_insn (void)
2941 {
2942 rtx insn = get_last_insn ();
2943
2944 if (insn)
2945 {
2946 if (NOTE_P (insn))
2947 for (insn = previous_insn (insn);
2948 insn && NOTE_P (insn);
2949 insn = previous_insn (insn))
2950 continue;
2951 else
2952 {
2953 if (NONJUMP_INSN_P (insn)
2954 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2955 insn = XVECEXP (PATTERN (insn), 0,
2956 XVECLEN (PATTERN (insn), 0) - 1);
2957 }
2958 }
2959
2960 return insn;
2961}
2962
2963/* Return the number of actual (non-debug) insns emitted in this
2964 function. */
2965
2966int
2967get_max_insn_count (void)
2968{
2969 int n = cur_insn_uid;
2970
2971 /* The table size must be stable across -g, to avoid codegen
2972 differences due to debug insns, and not be affected by
2973 -fmin-insn-uid, to avoid excessive table size and to simplify
2974 debugging of -fcompare-debug failures. */
2975 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2976 n -= cur_debug_insn_uid;
2977 else
2978 n -= MIN_NONDEBUG_INSN_UID;
2979
2980 return n;
2981}
2982
2983\f
2984/* Return the next insn. If it is a SEQUENCE, return the first insn
2985 of the sequence. */
2986
2987rtx
2988 next_insn (rtx insn)
2989 {
2990 if (insn)
2991 {
2992 insn = NEXT_INSN (insn);
2993 if (insn && NONJUMP_INSN_P (insn)
2994 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2995 insn = XVECEXP (PATTERN (insn), 0, 0);
2996 }
2997
2998 return insn;
2999}
3000
3001/* Return the previous insn. If it is a SEQUENCE, return the last insn
3002 of the sequence. */
3003
3004rtx
502b8322 3005previous_insn (rtx insn)
23b2ce53 3006{
75547801
KG
3007 if (insn)
3008 {
3009 insn = PREV_INSN (insn);
3010 if (insn && NONJUMP_INSN_P (insn)
3011 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3012 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3013 }
3014
3015 return insn;
3016}
3017
3018/* Return the next insn after INSN that is not a NOTE. This routine does not
3019 look inside SEQUENCEs. */
3020
3021rtx
3022 next_nonnote_insn (rtx insn)
3023 {
3024 while (insn)
3025 {
3026 insn = NEXT_INSN (insn);
3027 if (insn == 0 || !NOTE_P (insn))
3028 break;
3029 }
3030
3031 return insn;
3032}
3033
3034/* Return the next insn after INSN that is not a NOTE, but stop the
3035 search before we enter another basic block. This routine does not
3036 look inside SEQUENCEs. */
3037
3038rtx
3039next_nonnote_insn_bb (rtx insn)
3040{
3041 while (insn)
3042 {
3043 insn = NEXT_INSN (insn);
3044 if (insn == 0 || !NOTE_P (insn))
3045 break;
3046 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3047 return NULL_RTX;
3048 }
3049
3050 return insn;
3051}
3052
3053/* Return the previous insn before INSN that is not a NOTE. This routine does
3054 not look inside SEQUENCEs. */
3055
3056rtx
3057 prev_nonnote_insn (rtx insn)
3058 {
3059 while (insn)
3060 {
3061 insn = PREV_INSN (insn);
3062 if (insn == 0 || !NOTE_P (insn))
3063 break;
3064 }
3065
3066 return insn;
3067}
3068
3069/* Return the previous insn before INSN that is not a NOTE, but stop
3070 the search before we enter another basic block. This routine does
3071 not look inside SEQUENCEs. */
3072
3073rtx
3074prev_nonnote_insn_bb (rtx insn)
3075{
3076 while (insn)
3077 {
3078 insn = PREV_INSN (insn);
3079 if (insn == 0 || !NOTE_P (insn))
3080 break;
3081 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3082 return NULL_RTX;
3083 }
3084
3085 return insn;
3086}
3087
3088/* Return the next insn after INSN that is not a DEBUG_INSN. This
3089 routine does not look inside SEQUENCEs. */
3090
3091rtx
3092next_nondebug_insn (rtx insn)
3093{
3094 while (insn)
3095 {
3096 insn = NEXT_INSN (insn);
3097 if (insn == 0 || !DEBUG_INSN_P (insn))
3098 break;
3099 }
3100
3101 return insn;
3102}
3103
3104/* Return the previous insn before INSN that is not a DEBUG_INSN.
3105 This routine does not look inside SEQUENCEs. */
3106
3107rtx
3108prev_nondebug_insn (rtx insn)
3109{
3110 while (insn)
3111 {
3112 insn = PREV_INSN (insn);
3113 if (insn == 0 || !DEBUG_INSN_P (insn))
3114 break;
3115 }
3116
3117 return insn;
3118}
3119
3120/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3121 This routine does not look inside SEQUENCEs. */
3122
3123rtx
3124next_nonnote_nondebug_insn (rtx insn)
3125{
3126 while (insn)
3127 {
3128 insn = NEXT_INSN (insn);
3129 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3130 break;
3131 }
3132
3133 return insn;
3134}
3135
3136/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3137 This routine does not look inside SEQUENCEs. */
3138
3139rtx
3140prev_nonnote_nondebug_insn (rtx insn)
3141{
3142 while (insn)
3143 {
3144 insn = PREV_INSN (insn);
3145 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3146 break;
3147 }
3148
3149 return insn;
3150}
3151
3152/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3153 or 0, if there is none. This routine does not look inside
3154 SEQUENCEs. */
3155
3156rtx
3157 next_real_insn (rtx insn)
3158 {
3159 while (insn)
3160 {
3161 insn = NEXT_INSN (insn);
3162 if (insn == 0 || INSN_P (insn))
3163 break;
3164 }
3165
3166 return insn;
3167}
3168
3169/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3170 or 0, if there is none. This routine does not look inside
3171 SEQUENCEs. */
3172
3173rtx
3174 prev_real_insn (rtx insn)
3175 {
3176 while (insn)
3177 {
3178 insn = PREV_INSN (insn);
3179 if (insn == 0 || INSN_P (insn))
3180 break;
3181 }
3182
3183 return insn;
3184}
3185
3186/* Return the last CALL_INSN in the current list, or 0 if there is none.
3187 This routine does not look inside SEQUENCEs. */
3188
3189rtx
3190 last_call_insn (void)
3191{
3192 rtx insn;
3193
3194 for (insn = get_last_insn ();
3195 insn && !CALL_P (insn);
3196 insn = PREV_INSN (insn))
3197 ;
3198
3199 return insn;
3200}
3201
3202 /* Find the next insn after INSN that really does something. This routine
3203 does not look inside SEQUENCEs. After reload this also skips over
3204 standalone USE and CLOBBER insn. */
23b2ce53 3205
3206 int
3207 active_insn_p (const_rtx insn)
3208 {
3209 return (CALL_P (insn) || JUMP_P (insn)
3210 || (NONJUMP_INSN_P (insn)
3211 && (! reload_completed
3212 || (GET_CODE (PATTERN (insn)) != USE
3213 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3214}
3215
3216 rtx
3217 next_active_insn (rtx insn)
3218 {
3219 while (insn)
3220 {
3221 insn = NEXT_INSN (insn);
3222 if (insn == 0 || active_insn_p (insn))
3223 break;
3224 }
3225
3226 return insn;
3227}
3228
3229/* Find the last insn before INSN that really does something. This routine
3230 does not look inside SEQUENCEs. After reload this also skips over
3231 standalone USE and CLOBBER insn. */
3232
3233rtx
3234 prev_active_insn (rtx insn)
3235 {
3236 while (insn)
3237 {
3238 insn = PREV_INSN (insn);
3239 if (insn == 0 || active_insn_p (insn))
3240 break;
3241 }
3242
3243 return insn;
3244}
3245
3246/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3247
3248rtx
3249 next_label (rtx insn)
3250 {
3251 while (insn)
3252 {
3253 insn = NEXT_INSN (insn);
3254 if (insn == 0 || LABEL_P (insn))
3255 break;
3256 }
3257
3258 return insn;
3259}
3260
3261/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3262
3263rtx
3264 prev_label (rtx insn)
3265 {
3266 while (insn)
3267 {
3268 insn = PREV_INSN (insn);
3269 if (insn == 0 || LABEL_P (insn))
3270 break;
3271 }
3272
3273 return insn;
3274 }
3275
3276/* Return the last label to mark the same position as LABEL. Return null
3277 if LABEL itself is null. */
3278
3279rtx
3280skip_consecutive_labels (rtx label)
3281{
3282 rtx insn;
3283
3284 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3285 if (LABEL_P (insn))
3286 label = insn;
3287
3288 return label;
3289}
3290\f
3291#ifdef HAVE_cc0
3292/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3293 and REG_CC_USER notes so we can find it. */
3294
3295void
3296 link_cc0_insns (rtx insn)
3297{
3298 rtx user = next_nonnote_insn (insn);
3299
3300 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3301 user = XVECEXP (PATTERN (user), 0, 0);
3302
3303 add_reg_note (user, REG_CC_SETTER, insn);
3304 add_reg_note (insn, REG_CC_USER, user);
3305}
3306
3307/* Return the next insn that uses CC0 after INSN, which is assumed to
3308 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3309 applied to the result of this function should yield INSN).
3310
3311 Normally, this is simply the next insn. However, if a REG_CC_USER note
3312 is present, it contains the insn that uses CC0.
3313
3314 Return 0 if we can't find the insn. */
3315
3316rtx
3317 next_cc0_user (rtx insn)
3318 {
3319 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3320
3321 if (note)
3322 return XEXP (note, 0);
3323
3324 insn = next_nonnote_insn (insn);
3325 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3326 insn = XVECEXP (PATTERN (insn), 0, 0);
3327
3328 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3329 return insn;
3330
3331 return 0;
3332}
3333
3334/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3335 note, it is the previous insn. */
3336
3337rtx
502b8322 3338prev_cc0_setter (rtx insn)
23b2ce53 3339{
906c4e36 3340 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3341
3342 if (note)
3343 return XEXP (note, 0);
3344
3345 insn = prev_nonnote_insn (insn);
5b0264cb 3346 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3347
3348 return insn;
3349}
3350#endif
e5bef2e4 3351
594f8779
RZ
3352#ifdef AUTO_INC_DEC
3353/* Find a RTX_AUTOINC class rtx which matches DATA. */
3354
3355static int
3356find_auto_inc (rtx *xp, void *data)
3357{
3358 rtx x = *xp;
5ead67f6 3359 rtx reg = (rtx) data;
594f8779
RZ
3360
3361 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3362 return 0;
3363
3364 switch (GET_CODE (x))
3365 {
3366 case PRE_DEC:
3367 case PRE_INC:
3368 case POST_DEC:
3369 case POST_INC:
3370 case PRE_MODIFY:
3371 case POST_MODIFY:
3372 if (rtx_equal_p (reg, XEXP (x, 0)))
3373 return 1;
3374 break;
3375
3376 default:
3377 gcc_unreachable ();
3378 }
3379 return -1;
3380}
3381#endif
3382
e5bef2e4
HB
3383/* Increment the label uses for all labels present in rtx. */
3384
3385static void
502b8322 3386mark_label_nuses (rtx x)
e5bef2e4 3387{
b3694847
SS
3388 enum rtx_code code;
3389 int i, j;
3390 const char *fmt;
e5bef2e4
HB
3391
3392 code = GET_CODE (x);
3393 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3394 LABEL_NUSES (XEXP (x, 0))++;
3395
3396 fmt = GET_RTX_FORMAT (code);
3397 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3398 {
3399 if (fmt[i] == 'e')
3400 mark_label_nuses (XEXP (x, i));
3401 else if (fmt[i] == 'E')
3402 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3403 mark_label_nuses (XVECEXP (x, i, j));
3404 }
3405}
3406
\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* Update the debug information for the CALL_INSN.  */
            if (flag_enable_icf_debug)
              (*debug_hooks->copy_call_info) (trial, insn);
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
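
/* Illustrative sketch (an assumption, not original text): a pass that
   wants to split everything typically walks the stream and lets
   try_split replace each splittable insn in place:

     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST = 1 makes the walk resume after the final replacement
   insn, mirroring the recursive call inside try_split itself.  */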
\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slot.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
\f
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = get_last_insn ();
  NEXT_INSN (insn) = 0;

  if (NULL != get_last_insn ())
    NEXT_INSN (get_last_insn ()) = insn;

  if (NULL == get_insns ())
    set_first_insn (insn);

  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (get_last_insn () == after)
    set_last_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   BB from BEFORE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (get_insns () == before)
    set_first_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
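
/* Illustrative sketch (an assumption, not original text): passing a
   NULL basic block lets add_insn_before infer it from BEFORE, so
   hoisting a computation INSN to just before BEFORE in BEFORE's block
   can be written as:

     add_insn_before (insn, before, NULL);

   The explicit BB argument matters mainly when BEFORE's block links
   are not yet set up, as when a new block is being constructed.  */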

/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (NONDEBUG_INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
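
/* Illustrative sketch (an assumption, not original text): moving a
   single insn I to just after AFTER can be done by unlinking and
   relinking, which is what reorder_insns does for a whole range:

     remove_insn (i);
     add_insn_after (i, after, NULL);

   Unlike set_insn_deleted, remove_insn leaves I itself intact, so it
   can be reinserted elsewhere.  */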

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}
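
/* Illustrative sketch (an assumption, not original text): a common
   idiom in expanders is to checkpoint the stream, try an expansion,
   and roll back on failure:

     rtx last = get_last_insn ();
     emit_move_insn (target, source);
     if (<expansion turned out to be unusable>)
       delete_insns_since (last);

   Everything emitted after LAST is unlinked and LAST becomes the last
   insn again.  */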

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
#ifdef ENABLE_CHECKING
  rtx x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

\f
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
\f
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

4399/* Make an instruction with body X and code CALL_INSN
4400 and output it after the instruction AFTER. */
4401
4402rtx
a7102479 4403emit_call_insn_after_noloc (rtx x, rtx after)
2f937369
DM
4404{
4405 rtx last;
4406
5b0264cb 4407 gcc_assert (after);
2f937369
DM
4408
4409 switch (GET_CODE (x))
4410 {
b5b8b0ac 4411 case DEBUG_INSN:
2f937369
DM
4412 case INSN:
4413 case JUMP_INSN:
4414 case CALL_INSN:
4415 case CODE_LABEL:
4416 case BARRIER:
4417 case NOTE:
6fb5fa3c 4418 last = emit_insn_after_1 (x, after, NULL);
2f937369
DM
4419 break;
4420
4421#ifdef ENABLE_RTL_CHECKING
4422 case SEQUENCE:
5b0264cb 4423 gcc_unreachable ();
2f937369
DM
4424 break;
4425#endif
4426
4427 default:
4428 last = make_call_insn_raw (x);
6fb5fa3c 4429 add_insn_after (last, after, NULL);
2f937369
DM
4430 break;
4431 }
4432
4433 return last;
23b2ce53
RS
4434}
4435
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
\f
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

4538
a7102479
JH
4539/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4540rtx
4541emit_insn_after (rtx pattern, rtx after)
4542{
b5b8b0ac
AO
4543 rtx prev = after;
4544
4545 while (DEBUG_INSN_P (prev))
4546 prev = PREV_INSN (prev);
4547
4548 if (INSN_P (prev))
4549 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
a7102479 4550 else
6fb5fa3c 4551 return emit_insn_after_noloc (pattern, after, NULL);
a7102479
JH
4552}
4553
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}
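
/* Illustrative sketch (an assumption, not original text): the _setloc
   and plain variants differ only in where the locator comes from.
   Emitting a move after INSN while reusing INSN's source location:

     emit_insn_after (gen_move_insn (dest, src), insn);

   behaves like the explicit form

     emit_insn_after_setloc (gen_move_insn (dest, src), insn,
                             INSN_LOCATOR (insn));

   when INSN is a nondebug insn with a locator.  */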

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}
\f
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
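
/* Illustrative sketch (an assumption, not original text): recording
   that INSN's single destination is known to equal a constant,
   without stacking duplicate notes:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   A second call with a new datum overwrites the old note in place
   rather than adding another one.  */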
\f
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
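
/* Illustrative sketch (an assumption, not original text): emit ()
   picks the insn kind from the pattern itself, so a branch pattern

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));

   is classified as a JUMP_INSN (its SET_DEST is pc_rtx) and, being an
   unconditional jump, is followed by an automatic barrier.  */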
\f
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
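
/* Illustrative sketch (an assumption, not original text): to emit a
   fixup at the very end of the function while currently inside nested
   sequences, use the topmost-sequence pair:

     push_topmost_sequence ();
     emit_insn (fixup_pattern);     -- fixup_pattern is a stand-in name
     pop_topmost_sequence ();

   in_sequence_p () still returns nonzero there, since the same
   sequence stack implements both mechanisms.  */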
\f
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}

da43a810
BS
5375\f
5376/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5377static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5378static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5379static int copy_insn_n_scratches;
5380
5381/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5382 copied an ASM_OPERANDS.
5383 In that case, it is the original input-operand vector. */
5384static rtvec orig_asm_operands_vector;
5385
5386/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5387 copied an ASM_OPERANDS.
5388 In that case, it is the copied input-operand vector. */
5389static rtvec copy_asm_operands_vector;
5390
5391/* Likewise for the constraints vector. */
5392static rtvec orig_asm_constraints_vector;
5393static rtvec copy_asm_constraints_vector;
5394
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0))
	  && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
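
/* Illustrative note (not part of the original source): GET_RTX_FORMAT
   returns the rtl.def format string of a code, one character per
   operand.  A SET, for instance, has format "ee", so the 'e' case
   above recurses into both SET_DEST and SET_SRC, while scalar
   operands such as 'i' (int) or 'w' (HOST_WIDE_INT) need no work
   beyond the shallow_copy_rtx above and are left unchanged.  */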

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
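
/* Example (an illustrative sketch, not part of the original source):
   copying an insn pattern and then its REG_NOTEs, as described above
   copy_insn_1.  INSN is a hypothetical insn supplied by the caller.

	rtx pat = copy_insn (PATTERN (insn));
	rtx notes = copy_insn_1 (REG_NOTES (insn));

   copy_insn resets the SCRATCH map before copying, and copy_insn_1
   reuses it, so a SCRATCH appearing in both the pattern and a note is
   copied exactly once and stays shared in the result.  */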

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
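
/* Example (an illustrative sketch, not part of the original source),
   assuming the target provides V4SImode: gen_const_vector (V4SImode, 0)
   allocates a four-element rtvec, points every element at the shared
   scalar const_tiny_rtx[0][SImode] (i.e. const0_rtx), and wraps the
   vector in a raw CONST_VECTOR of mode V4SImode.  */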

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero
   vector when all elements are zero, and the one vector when all
   elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
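
/* Example (an illustrative sketch, not part of the original source),
   assuming the target provides V2SImode: a vector whose elements are
   all const0_rtx comes back as the shared CONST0_RTX (V2SImode)
   rather than a fresh CONST_VECTOR, so callers may compare against
   CONST0_RTX by pointer.

	rtvec v = rtvec_alloc (2);
	rtx x;

	RTVEC_ELT (v, 0) = const0_rtx;
	RTVEC_ELT (v, 1) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V2SImode, v);
	gcc_assert (x == CONST0_RTX (V2SImode));
*/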

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here, since it would try to consult
     the very cache these loops are initializing.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
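
  /* Illustrative note (not part of the original source): the
     const_tiny_rtx table filled in above is what the CONST0_RTX,
     CONST1_RTX and CONST2_RTX macros in rtl.h index into, so e.g.
     CONST0_RTX (SImode) and GEN_INT (0) yield the same shared rtx.  */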

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
					FCONST1 (mode), mode);
    }
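
  /* Illustrative note (not part of the original source): with FBIT
     fractional bits, the fixed-point value 1 is the bit pattern
     1 << GET_MODE_FBIT (mode), which is what the lshift_double calls
     above compute across the low and high HOST_WIDE_INT halves of the
     2 * HOST_BITS_PER_WIDE_INT-bit FCONST1 payload.  */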

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
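
/* Example (an illustrative sketch, not part of the original source):
   duplicating INSN at the end of basic block BB, much as the basic
   block reordering code does for epilogue insns.  INSN and BB are
   hypothetical values supplied by the caller.

	rtx copy = emit_copy_of_insn_after (insn, BB_END (bb));

   The copy's REG_NOTEs share SCRATCHes with its pattern because the
   pattern is copied with copy_insn and the notes with copy_insn_1.  */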

static GTY((deletable)) rtx
  hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
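
/* Example (an illustrative sketch, not part of the original source):
   the table above memoizes clobbers, so repeated requests return the
   identical rtx and callers may compare them by pointer.

	rtx c1 = gen_hard_reg_clobber (SImode, 0);
	rtx c2 = gen_hard_reg_clobber (SImode, 0);
	gcc_assert (c1 == c2);

   GTY((deletable)) lets the garbage collector discard the cache;
   entries are simply rebuilt on demand afterwards.  */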

#include "gt-emit-rtl.h"