/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
                                 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ (p->addrspace * 4000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
          && p->size == q->size && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
               unsigned int align, addr_space_t addrspace, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

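/* Illustrative note (added for exposition; not in the original source):
   mem_attrs records are hash-consed, so two calls with identical
   argument tuples yield the same record and MEM attributes can be
   compared by pointer.  A hypothetical sketch:

       mem_attrs *a = get_mem_attrs (alias, expr, off, size, align, as, mode);
       mem_attrs *b = get_mem_attrs (alias, expr, off, size, align, as, mode);
       gcc_assert (a == b);

   Both calls resolve to the same slot in mem_attrs_htab.  */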
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   and insert it into the hash table if one identical to it is not
   already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

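/* Illustrative note (added for exposition; not in the original source):
   a typical use of gen_blockage is emitting a scheduling barrier in a
   backend's prologue or epilogue expander:

       emit_insn (gen_blockage ());

   Because the ASM_INPUT is marked volatile, the scheduler will not
   move other insns across it.  */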

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

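/* Illustrative note (added for exposition; not in the original source):
   because small CONST_INTs are preallocated in const_int_rtx and larger
   ones are hash-consed above, equal values are always the same rtx and
   may be compared by pointer:

       gcc_assert (GEN_INT (0) == const0_rtx);
       gcc_assert (gen_rtx_CONST_INT (VOIDmode, 12345)
                   == gen_rtx_CONST_INT (VOIDmode, 12345));  */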
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

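/* Illustrative note (added for exposition; not in the original source):
   gen_int_mode first truncates C to the width of MODE, so the result is
   canonical for that mode.  Assuming the usual 8-bit QImode:

       gen_int_mode (255, QImode) == constm1_rtx

   since 255 sign-extends from 8 bits to -1, whereas a bare
   GEN_INT (255) would not be a valid QImode CONST_INT.  */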
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Construct a double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
        the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
        of copies of the sign bit, and the signs of i0 and i1 are the same),
        in which case we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

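/* Illustrative note (added for exposition; not in the original source):
   assuming a host with 32-bit HOST_WIDE_INT, case 2 above means

       immed_double_const (5, 0, DImode)

   returns the shared CONST_INT 5 (the high word holds only sign
   copies), while

       immed_double_const (0, 1, DImode)

   allocates a VOIDmode CONST_DOUBLE representing 2^32.  */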
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

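/* Illustrative note (added for exposition; not in the original source):
   gen_const_mem is the constructor of choice for references to memory
   the compiler knows to be constant and non-trapping, for instance a
   constant-pool entry:

       rtx mem = gen_const_mem (mode, addr);

   where ADDR is assumed to be a valid address rtx supplied by the
   caller.  */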
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

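/* Illustrative example (added for exposition; not in the original
   source): with 8-byte DFmode and DImode and 4-byte SImode, the
   float-mode size rule above accepts (subreg:DI (reg:DF d) 0) but
   rejects (subreg:SI (reg:DF d) 0), unless SImode happens to be
   word_mode, in which case the temporary word_mode escape hatch above
   still lets it through.  gen_rtx_SUBREG asserts validate_subreg, so an
   invalid combination aborts at generation time.  */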
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
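/* Illustrative note (added for exposition; not in the original source):
   on a little-endian target, gen_lowpart_SUBREG (SImode, x) for a
   DImode X yields (subreg:SI (reg:DI n) 0); on a big-endian target with
   4-byte words the computed byte offset is 4 instead.  */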
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
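/* Illustrative note (added for exposition; not in the original source):
   a common use of gen_rtvec is building a PARALLEL:

       rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where SET0 and SET1 are assumed to be previously constructed SET
   rtxen.  */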
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
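/* Illustrative note (added for exposition; not in the original source):
   assuming 4-byte SImode, 8-byte DImode and 4-byte words,
   byte_lowpart_offset (SImode, DImode) is 0 on little-endian targets
   and 4 on big-endian ones; the paradoxical direction
   byte_lowpart_offset (DImode, SImode) is 0 and -4 respectively.  */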
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimation because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary
         parts, which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

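/* Illustrative example (added for exposition; not in the original
   source): expansion code obtains a fresh pseudo with

       rtx tmp = gen_reg_rtx (SImode);

   which yields (reg:SI N) for the next free pseudo number N.  For a
   complex mode such as SCmode the function instead returns a CONCAT of
   two SFmode pseudos, as the code above explains.  */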
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
        REG_ATTRS (reg)
          = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from the memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current
   function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes to get the OUTERMODE high part of the
   value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

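/* Illustrative note (added for exposition; not in the original source):
   subreg_highpart_offset mirrors subreg_lowpart_offset.  With 4-byte
   SImode, 8-byte DImode and 4-byte words,
   subreg_highpart_offset (SImode, DImode) is 4 on little-endian targets
   and 0 on big-endian ones, the opposite of the lowpart offsets.  */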
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
\f
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.  Use of this function can now mostly be
   replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
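/* Illustrative note (added for exposition; not in the original source):
   the typical use is splitting a double-word operand into its two
   word-sized halves:

       rtx lo = operand_subword_force (op, 0, DImode);
       rtx hi = operand_subword_force (op, 1, DImode);

   Subword 0 is the low-order word when !WORDS_BIG_ENDIAN; with
   WORDS_BIG_ENDIAN the two indices swap meaning.  */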
1430\f
2b3493c8
AK
1431/* Returns 1 if both MEM_EXPR can be considered equal
1432 and 0 otherwise. */
1433
1434int
4f588890 1435mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1436{
1437 if (expr1 == expr2)
1438 return 1;
1439
1440 if (! expr1 || ! expr2)
1441 return 0;
1442
1443 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1444 return 0;
1445
55b34b5f 1446 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1447}
1448
805903b5
JJ
1449/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1450 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1451 -1 if not known. */
1452
1453int
d9223014 1454get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1455{
1456 tree expr;
1457 unsigned HOST_WIDE_INT offset;
1458
1459 /* This function can't use
1460 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1461 || !CONST_INT_P (MEM_OFFSET (mem))
e80c2726
RG
1462 || (MAX (MEM_ALIGN (mem),
1463 get_object_alignment (MEM_EXPR (mem), align))
805903b5
JJ
1464 < align))
1465 return -1;
1466 else
1467 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1468 for two reasons:
1469 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1470 for <variable>. get_inner_reference doesn't handle it and
1471 even if it did, the alignment in that case needs to be determined
1472 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1473 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1474 isn't sufficiently aligned, the object it is in might be. */
1475 gcc_assert (MEM_P (mem));
1476 expr = MEM_EXPR (mem);
1477 if (expr == NULL_TREE
1478 || MEM_OFFSET (mem) == NULL_RTX
1479 || !CONST_INT_P (MEM_OFFSET (mem)))
1480 return -1;
1481
1482 offset = INTVAL (MEM_OFFSET (mem));
1483 if (DECL_P (expr))
1484 {
1485 if (DECL_ALIGN (expr) < align)
1486 return -1;
1487 }
1488 else if (INDIRECT_REF_P (expr))
1489 {
1490 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1491 return -1;
1492 }
1493 else if (TREE_CODE (expr) == COMPONENT_REF)
1494 {
1495 while (1)
1496 {
1497 tree inner = TREE_OPERAND (expr, 0);
1498 tree field = TREE_OPERAND (expr, 1);
1499 tree byte_offset = component_ref_field_offset (expr);
1500 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1501
1502 if (!byte_offset
1503 || !host_integerp (byte_offset, 1)
1504 || !host_integerp (bit_offset, 1))
1505 return -1;
1506
1507 offset += tree_low_cst (byte_offset, 1);
1508 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1509
1510 if (inner == NULL_TREE)
1511 {
1512 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1513 < (unsigned int) align)
1514 return -1;
1515 break;
1516 }
1517 else if (DECL_P (inner))
1518 {
1519 if (DECL_ALIGN (inner) < align)
1520 return -1;
1521 break;
1522 }
1523 else if (TREE_CODE (inner) != COMPONENT_REF)
1524 return -1;
1525 expr = inner;
1526 }
1527 }
1528 else
1529 return -1;
1530
1531 return offset & ((align / BITS_PER_UNIT) - 1);
1532}
1533
/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
          && (DECL_P (TREE_OPERAND (op0, 0))
              || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
        {
          if (DECL_P (TREE_OPERAND (op0, 0)))
            align = DECL_ALIGN (TREE_OPERAND (op0, 0));
          else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
            {
              align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
              align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
#endif
            }
          if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
            {
              unsigned HOST_WIDE_INT ioff
                = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
              unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
              align = MIN (aoff, align);
            }
        }
      else
        /* ??? This isn't fully correct, we can't set the alignment from the
           type in all cases.  */
        align = MAX (align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
         examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);

      if (TREE_CODE (base) == MEM_REF
          && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
        base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
            MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
          else
            MEM_NOTRAP_P (ref) = 1;
        }
      else
        MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
          align_computed = true;
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
          align_computed = true;
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  align_computed = true;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }

          /* If this is an indirect reference, record it.  */
          else if (TREE_CODE (t) == MEM_REF)
            {
              expr = t;
              offset = const0_rtx;
              apply_bitpos = bitpos;
            }
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
        }

      if (!align_computed && !INDIRECT_REF_P (t))
        {
          unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
          align = MAX (align, obj_align);
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
                     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

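/* Editorial note: an illustrative sketch, not part of the original file.
   A typical caller builds a MEM for a declaration and then records the
   tree-level attributes on it.  `example_mem_for_decl' and `my_decl' are
   hypothetical names; DECL_RTL must not yet be set when this runs, per
   the assertion in set_mem_attributes_minus_bitpos above.  */

static rtx
example_mem_for_decl (tree my_decl)
{
  rtx addr = gen_reg_rtx (Pmode);
  rtx mem = gen_rtx_MEM (DECL_MODE (my_decl), addr);

  /* OBJECTP is 1: the MEM is a fresh object of this type, so alignment
     and size may be taken from the type itself.  */
  set_mem_attributes (mem, my_decl, 1);
  return mem;
}
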
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem),
                                   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
                     MEM_SIZE (mem), MEM_ALIGN (mem),
                     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
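
/* Editorial note: an illustrative sketch, not part of the original file.
   Each setter above rebuilds the shared MEM_ATTRS record rather than
   mutating it in place, so the calls below leave other MEMs untouched.
   The function name and the particular constants are hypothetical.  */

static void
example_describe_access (rtx mem)
{
  set_mem_align (mem, 32);            /* Alignment is in bits: 4 bytes.  */
  set_mem_size (mem, GEN_INT (16));   /* 16 bytes will be accessed.  */
}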

/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
          || (MEM_EXPR (memref) == NULL
              && MEM_OFFSET (memref) == NULL
              && MEM_SIZE (memref) == size
              && MEM_ALIGN (memref) == align))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
                     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}
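
/* Editorial note: an illustrative sketch, not part of the original file.
   change_address suits a same-object access through a brand-new address,
   e.g. stepping through a BLKmode slot word by word; only the alias set
   survives, per the comment above.  The names here are hypothetical.  */

static rtx
example_word_view (rtx blk_mem, rtx word_addr)
{
  return change_address (blk_mem, SImode, word_addr);
}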

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF's base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  int pbits;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new_rtx) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
                                       memoffset, size, memalign, as,
                                       GET_MODE (new_rtx));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}

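/* Editorial note: an illustrative sketch, not part of the original file.
   Callers normally reach adjust_address_1 through the adjust_address and
   adjust_address_nv wrappers (defined in a header, not here).  Splitting
   a double-word memory into word halves looks like this; the function
   name is hypothetical.  */

static void
example_split_double_word (rtx dmem, rtx *lo, rtx *hi)
{
  *lo = adjust_address (dmem, SImode, 0);
  *hi = adjust_address (dmem, SImode, GET_MODE_SIZE (SImode));
}
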
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is nonzero, the memory
   address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
                     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
                     as, GET_MODE (new_rtx));
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
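
/* Editorial note: an illustrative sketch, not part of the original file.
   The usual use of replace_equiv_address is to legitimize a complex
   address without losing memory attributes, e.g. after copying the
   address into a register; the function name is hypothetical.  */

static rtx
example_address_in_reg (rtx mem)
{
  rtx addr = force_reg (Pmode, XEXP (mem, 0));

  /* Same memory object, same attributes; only the address form changes.  */
  return replace_equiv_address (mem, addr);
}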

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new_rtx);
  rtx memoffset = MEM_OFFSET (new_rtx);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (expr, 1);
          tree offset = component_ref_field_offset (expr);

          if (! DECL_SIZE_UNIT (field))
            {
              expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && INTVAL (memoffset) >= 0)
            break;

          if (! host_integerp (offset, 1))
            {
              expr = NULL_TREE;
              break;
            }

          expr = TREE_OPERAND (expr, 0);
          memoffset
            = (GEN_INT (INTVAL (memoffset)
                        + tree_low_cst (offset, 1)
                        + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                           / BITS_PER_UNIT)));
        }
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
               && DECL_SIZE_UNIT (expr)
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
               && (! memoffset || INTVAL (memoffset) >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          expr = NULL_TREE;
          break;
        }
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
                                       MEM_ALIGN (new_rtx),
                                       MEM_ADDR_SPACE (new_rtx), mode);

  return new_rtx;
}

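/* Editorial note: an illustrative sketch, not part of the original file.
   A target without byte loads could widen a QImode reference to a full
   word and mask afterwards; offset 0 keeps the same starting address.
   The function name is hypothetical.  */

static rtx
example_widen_byte_ref (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}
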
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  TREE_THIS_NOTRAP (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
                                  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  alias_set_type alias;
  rtx addr, offset;
  tree expr;

  expr = get_spill_slot_decl (true);
  alias = MEM_ALIAS_SET (DECL_RTL (expr));

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  offset = const0_rtx;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    offset = XEXP (addr, 1);

  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   ADDR_SPACE_GENERIC, GET_MODE (mem));
  MEM_NOTRAP_P (mem) = 1;
}

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}

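/* Editorial note: an illustrative sketch, not part of the original file.
   A label returned by gen_label_rtx becomes a jump target only once it
   is both referenced and emitted; the usual emit-time pattern is shown
   below with a hypothetical function name.  */

static void
example_emit_label (void)
{
  rtx label = gen_label_rtx ();

  /* ... emit a conditional branch to LABEL here ...  */

  emit_label (label);   /* Place the label in the insn stream.  */
}
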
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};

/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Clobbers of hard registers may also be shared.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (GET_CODE (PATTERN (p)) == SEQUENCE)
          {
            int i;
            rtx q, sequence = PATTERN (p);

            for (i = 0; i < XVECLEN (sequence, 0); i++)
              {
                q = XVECEXP (sequence, 0, i);
                gcc_assert (INSN_P (q));
                reset_used_flags (PATTERN (q));
                reset_used_flags (REG_NOTES (q));
              }
          }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        verify_rtx_sharing (PATTERN (p), p);
        verify_rtx_sharing (REG_NOTES (p), p);
      }
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

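/* Editorial note: an illustrative sketch, not part of the original file.
   copy_rtx_if_shared relies on the `used' flags, so an isolated caller
   first clears the flags on the expression, then copies; the function
   name is hypothetical.  */

static rtx
example_unshare (rtx x)
{
  reset_used_flags (x);
  return copy_rtx_if_shared (x);
}
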
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          reset_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            reset_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          set_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            set_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}

/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0, 0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0,
                            XVECLEN (PATTERN (insn), 0) - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
next_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

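/* Editorial note: an illustrative sketch, not part of the original file.
   A pass that must ignore both notes and debug insns walks the chain
   with the combined helper above; the function name is hypothetical.  */

static int
example_count_real_insns (void)
{
  int n = 0;
  rtx insn;

  for (insn = get_insns (); insn; insn = next_nonnote_nondebug_insn (insn))
    if (INSN_P (insn))
      n++;
  return n;
}
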
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}

/* Return nonzero if INSN really does something: it is a CALL_INSN,
   a JUMP_INSN, or a NONJUMP_INSN whose pattern, after reload, is not
   merely a standalone USE or CLOBBER.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
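
/* Editorial note: an illustrative sketch, not part of the original file.
   After reload, next_active_insn also skips standalone USE and CLOBBER,
   so checking whether a "real" jump follows INSN is simply this; the
   function name is hypothetical.  */

static bool
example_followed_by_jump (rtx insn)
{
  rtx next = next_active_insn (insn);

  return next != 0 && JUMP_P (next);
}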

#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return -1;
}
#endif

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* Update the debug information for the CALL_INSN.  */
            if (flag_enable_icf_debug)
              (*debug_hooks->copy_call_info) (trial, insn);
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
594f8779 3543 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3544 {
4b4bf941 3545 if (JUMP_P (insn))
65c5f2a6 3546 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3547 }
599aedd9 3548 break;
e5bef2e4 3549
594f8779
RZ
3550#ifdef AUTO_INC_DEC
3551 case REG_INC:
3552 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3553 {
3554 rtx reg = XEXP (note, 0);
3555 if (!FIND_REG_INC_NOTE (insn, reg)
3556 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
65c5f2a6 3557 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3558 }
3559 break;
3560#endif
3561
599aedd9
RH
3562 default:
3563 break;
23b2ce53 3564 }
599aedd9
RH
3565 }
3566
3567 /* If there are LABELS inside the split insns increment the
3568 usage count so we don't delete the label. */
cf7c4aa6 3569 if (INSN_P (trial))
599aedd9
RH
3570 {
3571 insn = insn_last;
3572 while (insn != NULL_RTX)
23b2ce53 3573 {
cf7c4aa6 3574 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3575 if (NONJUMP_INSN_P (insn))
599aedd9 3576 mark_label_nuses (PATTERN (insn));
23b2ce53 3577
599aedd9
RH
3578 insn = PREV_INSN (insn);
3579 }
23b2ce53
RS
3580 }
3581
0435312e 3582 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3583
3584 delete_insn (trial);
3585 if (has_barrier)
3586 emit_barrier_after (tem);
3587
3588 /* Recursively call try_split for each new insn created; by the
3589 time control returns here that insn will be fully split, so
3590 set LAST and continue from the insn after the one returned.
3591 We can't use next_active_insn here since AFTER may be a note.
3592 Ignore deleted insns, which can be occur if not optimizing. */
3593 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3594 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3595 tem = try_split (PATTERN (tem), tem, 1);
3596
3597 /* Return either the first or the last insn, depending on which was
3598 requested. */
3599 return last
5936d944 3600 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3601 : NEXT_INSN (before);
23b2ce53
RS
3602}
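
/* A usage sketch (INSN is a placeholder): ask the machine description
   to break INSN up, keeping the last insn of whatever it produces.
   If nothing could be split, try_split hands INSN back unchanged.

     rtx last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       ... the split succeeded and INSN has been deleted ...  */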

/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = get_last_insn ();
  NEXT_INSN (insn) = 0;

  if (NULL != get_last_insn ())
    NEXT_INSN (get_last_insn ()) = insn;

  if (NULL == get_insns ())
    set_first_insn (insn);

  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (get_last_insn () == after)
    set_last_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from BEFORE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (get_insns () == before)
    set_first_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}


/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */

void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (get_insns () == insn)
    {
      if (next)
        PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
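
/* For instance (a sketch; CALL_INSN and REG are placeholders), a single
   USE of REG can be appended to a call's usage list like so:

     rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                     gen_rtx_USE (VOIDmode, reg), NULL_RTX);
     add_function_usage_to (call_insn, fusage);  */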

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}

/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_insn_after_noloc (pattern, after, NULL);
}
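
/* In other words (a sketch): emit_insn_after (pat, after) both inserts
   PAT after AFTER and stamps the new insns with the locator of the
   nearest non-debug insn at or before AFTER, whereas
   emit_insn_after_noloc (pat, after, NULL) performs only the insertion,
   leaving the locators as make_insn_raw assigned them.  */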

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
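
/* E.g. (a sketch): gen_use (some_reg) returns a detached one-insn
   sequence containing (use (reg ...)).  Note that gen_clobber and
   gen_use are themselves small instances of the
   start_sequence/get_insns/end_sequence idiom described earlier in
   this file.  */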

/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
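
/* For example (a sketch; INSN and VALUE are placeholders): to record
   that INSN's single set computes VALUE, replacing any stale note of
   the same kind:

     set_unique_reg_note (insn, REG_EQUAL, value);  */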

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
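
/* For instance (a sketch): emit (gen_rtx_SET (VOIDmode, pc_rtx, x))
   classifies the pattern as a jump and behaves like emit_jump_insn,
   additionally emitting a barrier when the jump is unconditional.  */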

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
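
/* The whole machinery in miniature (a sketch; PAT is a placeholder):

     start_sequence ();
     emit_insn (pat);
     seq = get_insns ();
     end_sequence ();

   Insns emitted between start_sequence and end_sequence land on a
   fresh chain; get_insns retrieves that chain, and end_sequence
   restores the previously current one, so SEQ can later be spliced in
   with one of the emit_*_before/after entry points.  */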
23b2ce53 5368\f
59ec66dc
MM
5369/* Put the various virtual registers into REGNO_REG_RTX. */
5370
2bbdec73 5371static void
bd60bab2 5372init_virtual_regs (void)
59ec66dc 5373{
bd60bab2
JH
5374 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5375 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5376 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5377 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5378 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5379 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5380 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5381}
5382
da43a810
BS
5383\f
5384/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5385static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5386static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5387static int copy_insn_n_scratches;
5388
5389/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5390 copied an ASM_OPERANDS.
5391 In that case, it is the original input-operand vector. */
5392static rtvec orig_asm_operands_vector;
5393
5394/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5395 copied an ASM_OPERANDS.
5396 In that case, it is the copied input-operand vector. */
5397static rtvec copy_asm_operands_vector;
5398
5399/* Likewise for the constraints vector. */
5400static rtvec orig_asm_constraints_vector;
5401static rtvec copy_asm_constraints_vector;
5402
5403/* Recursively create a new copy of an rtx for copy_insn.
5404 This function differs from copy_rtx in that it handles SCRATCHes and
5405 ASM_OPERANDs properly.
5406 Normally, this function is not used directly; use copy_insn as front end.
5407 However, you could first copy an insn pattern with copy_insn and then use
5408 this function afterwards to properly copy any REG_NOTEs containing
5409 SCRATCHes. */
5410
rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
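
/* Illustrative sketch (editorial addition): as the comment above
   copy_insn_1 notes, a caller can copy an insn pattern with copy_insn
   and then copy its REG_NOTEs with copy_insn_1, so that SCRATCHes in
   the notes map to the same copies made for the pattern.  `insn' and
   `note' are hypothetical.

     new_pat = copy_insn (PATTERN (insn));
     note_copy = copy_insn_1 (XEXP (note, 0));  */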

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
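
/* Illustrative sketch (editorial addition, not part of the original
   file): after init_emit, the pointer-ness and alignment recorded above
   can be queried through the usual accessors, e.g. in a hypothetical
   pass:

     if (REG_POINTER (virtual_stack_vars_rtx))
       align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM);

   where `align' is a hypothetical local; on targets that define
   STACK_BOUNDARY this yields STACK_BOUNDARY bits.  */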

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
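
/* Illustrative sketch (editorial addition): building an all-zero vector
   through this function returns the shared CONST0_RTX for the vector
   mode instead of a fresh CONST_VECTOR.  V4SImode is assumed to exist
   on the target.

     rtvec v = rtvec_alloc (4);
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);   <- CONST0_RTX (V4SImode)  */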

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
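
/* Design note (editorial addition): the globals above are created with
   gen_raw_REG rather than gen_rtx_REG.  gen_rtx_REG consults these very
   globals so it can hand back shared objects for well-known hard
   registers; calling it here would be circular.  Once initialization is
   done, a call such as

     gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)

   is expected to return the shared frame_pointer_rtx rather than a
   fresh REG.  */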

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

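  /* Editorial sketch: a consequence of the caching above is that small
     CONST_INTs are unique, so pointer equality can be used to compare
     them.  For a hypothetical caller:

       rtx a = GEN_INT (2);
       rtx b = GEN_INT (2);
       gcc_assert (a == b);
       gcc_assert (GEN_INT (0) == const0_rtx);

     This is also why gen_rtx_raw_CONST_INT must be used while the
     const_int_rtx table itself is being filled in.  */
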
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST0 (mode), mode);

      /* We store the value 1, which for a fixed-point mode is represented
	 by the underlying integer 1 << GET_MODE_FBIT (mode).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST0 (mode), mode);

      /* We store the value 1, which for a fixed-point mode is represented
	 by the underlying integer 1 << GET_MODE_FBIT (mode).  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
\f
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
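
/* Illustrative sketch (editorial addition): duplicating a frame-related
   insn, as the basic-block reordering code may do for epilogue insns.
   `insn' and `after' are hypothetical.

     copy = emit_copy_of_insn_after (insn, after);
     gcc_assert (RTX_FRAME_RELATED_P (copy) == RTX_FRAME_RELATED_P (insn));  */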

static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
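
/* Design note (editorial addition): the table above memoizes one CLOBBER
   per (mode, hard register) pair, so repeated requests share a single
   rtx.  GTY((deletable)) lets the garbage collector drop the whole
   cache, since any entry can simply be recreated on demand.  A
   hypothetical caller, where FLAGS_REG stands for some target-specific
   hard register:

     rtx clobber = gen_hard_reg_clobber (CCmode, FLAGS_REG);  */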

#include "gt-emit-rtl.h"