gcc/emit-rtl.c
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
54fb1ae0 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
c2969d8e 4 2010, 2011
b6f65e3c 5 Free Software Foundation, Inc.
23b2ce53 6
1322177d 7This file is part of GCC.
23b2ce53 8
1322177d
LB
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
9dcd6f09 11Software Foundation; either version 3, or (at your option) any later
1322177d 12version.
23b2ce53 13
1322177d
LB
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
23b2ce53
RS
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
23b2ce53
RS
22
23
24/* Middle-to-low level generation of rtx code and insns.
25
f822fcf7
KH
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
28
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
23b2ce53
RS
36
37#include "config.h"
670ee920 38#include "system.h"
4977bab6
ZW
39#include "coretypes.h"
40#include "tm.h"
718f9c0f 41#include "diagnostic-core.h"
23b2ce53 42#include "rtl.h"
a25c7971 43#include "tree.h"
6baf1cc8 44#include "tm_p.h"
23b2ce53
RS
45#include "flags.h"
46#include "function.h"
47#include "expr.h"
48#include "regs.h"
aff48bca 49#include "hard-reg-set.h"
c13e8210 50#include "hashtab.h"
23b2ce53 51#include "insn-config.h"
e9a25f70 52#include "recog.h"
0dfa1860 53#include "bitmap.h"
a05924f9 54#include "basic-block.h"
87ff9c8e 55#include "ggc.h"
e1772ac0 56#include "debug.h"
d23c55c2 57#include "langhooks.h"
ef330312 58#include "tree-pass.h"
6fb5fa3c 59#include "df.h"
b5b8b0ac 60#include "params.h"
d4ebfa65 61#include "target.h"
4994da65 62#include "tree-flow.h"
ca695ac9 63
5fb0e246
RS
64struct target_rtl default_target_rtl;
65#if SWITCHABLE_TARGET
66struct target_rtl *this_target_rtl = &default_target_rtl;
67#endif
68
69#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70
1d445e9e
ILT
71/* Commonly used modes. */
72
0f41302f
MS
73enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 75enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 76enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 77
bd60bab2
JH
78/* Datastructures maintained for currently processed function in RTL form. */
79
3e029763 80struct rtl_data x_rtl;
bd60bab2
JH
81
82/* Indexed by pseudo register number, gives the rtx for that pseudo.
b8698a0f 83 Allocated in parallel with regno_pointer_align.
bd60bab2
JH
84 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85 with length attribute nested in top level structures. */
86
87rtx * regno_reg_rtx;
23b2ce53
RS
88
89/* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
91
044b4de3 92static GTY(()) int label_num = 1;
23b2ce53 93
23b2ce53
RS
94/* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
e7c82a99
JJ
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
23b2ce53 98
e7c82a99 99rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
23b2ce53 100
68d75312
JC
101rtx const_true_rtx;
102
23b2ce53
RS
103REAL_VALUE_TYPE dconst0;
104REAL_VALUE_TYPE dconst1;
105REAL_VALUE_TYPE dconst2;
106REAL_VALUE_TYPE dconstm1;
03f2ea93 107REAL_VALUE_TYPE dconsthalf;
23b2ce53 108
325217ed
CF
109/* Record fixed-point constant 0 and 1. */
110FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112
23b2ce53
RS
113/* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
116 integers. */
117
5da077de 118rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 119
ca4adc91
RS
120/* Standard pieces of rtx, to be substituted directly into things. */
121rtx pc_rtx;
122rtx ret_rtx;
123rtx simple_return_rtx;
124rtx cc0_rtx;
125
c13e8210
MM
126/* A hash table storing CONST_INTs whose absolute value is greater
127 than MAX_SAVED_CONST_INT. */
128
e2500fed
GK
129static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_int_htab;
c13e8210 131
173b24b9 132/* A hash table storing memory attribute structures. */
e2500fed
GK
133static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
134 htab_t mem_attrs_htab;
173b24b9 135
a560d4d4
JH
136/* A hash table storing register attribute structures. */
137static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
138 htab_t reg_attrs_htab;
139
5692c7bc 140/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
141static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_double_htab;
5692c7bc 143
091a3ac7
CF
144/* A hash table storing all CONST_FIXEDs. */
145static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_fixed_htab;
147
3e029763 148#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
b5b8b0ac 149#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
3e029763
JH
150#define last_location (crtl->emit.x_last_location)
151#define first_label_num (crtl->emit.x_first_label_num)
23b2ce53 152
502b8322 153static rtx make_call_insn_raw (rtx);
502b8322 154static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
5eb2a9f2 155static void set_used_decls (tree);
502b8322
AJ
156static void mark_label_nuses (rtx);
157static hashval_t const_int_htab_hash (const void *);
158static int const_int_htab_eq (const void *, const void *);
159static hashval_t const_double_htab_hash (const void *);
160static int const_double_htab_eq (const void *, const void *);
161static rtx lookup_const_double (rtx);
091a3ac7
CF
162static hashval_t const_fixed_htab_hash (const void *);
163static int const_fixed_htab_eq (const void *, const void *);
164static rtx lookup_const_fixed (rtx);
502b8322
AJ
165static hashval_t mem_attrs_htab_hash (const void *);
166static int mem_attrs_htab_eq (const void *, const void *);
502b8322
AJ
167static hashval_t reg_attrs_htab_hash (const void *);
168static int reg_attrs_htab_eq (const void *, const void *);
169static reg_attrs *get_reg_attrs (tree, int);
a73b091d 170static rtx gen_const_vector (enum machine_mode, int);
32b32b16 171static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 172
6b24c259
JH
173/* Probability of the conditional branch currently proceeded by try_split.
174 Set to -1 otherwise. */
175int split_branch_probability = -1;
ca695ac9 176\f
c13e8210
MM
177/* Returns a hash code for X (which is a really a CONST_INT). */
178
179static hashval_t
502b8322 180const_int_htab_hash (const void *x)
c13e8210 181{
f7d504c2 182 return (hashval_t) INTVAL ((const_rtx) x);
c13e8210
MM
183}
184
cc2902df 185/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
186 CONST_INT) is the same as that given by Y (which is really a
187 HOST_WIDE_INT *). */
188
189static int
502b8322 190const_int_htab_eq (const void *x, const void *y)
c13e8210 191{
f7d504c2 192 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
5692c7bc
ZW
193}
194
195/* Returns a hash code for X (which is really a CONST_DOUBLE). */
196static hashval_t
502b8322 197const_double_htab_hash (const void *x)
5692c7bc 198{
f7d504c2 199 const_rtx const value = (const_rtx) x;
46b33600 200 hashval_t h;
5692c7bc 201
46b33600
RH
202 if (GET_MODE (value) == VOIDmode)
203 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
204 else
fe352c29 205 {
15c812e3 206 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
207 /* MODE is used in the comparison, so it should be in the hash. */
208 h ^= GET_MODE (value);
209 }
5692c7bc
ZW
210 return h;
211}
212
cc2902df 213/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
214 is the same as that represented by Y (really a ...) */
215static int
502b8322 216const_double_htab_eq (const void *x, const void *y)
5692c7bc 217{
f7d504c2 218 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
5692c7bc
ZW
219
220 if (GET_MODE (a) != GET_MODE (b))
221 return 0;
8580f7a0
RH
222 if (GET_MODE (a) == VOIDmode)
223 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
224 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
225 else
226 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
227 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
228}
229
091a3ac7
CF
230/* Returns a hash code for X (which is really a CONST_FIXED). */
231
232static hashval_t
233const_fixed_htab_hash (const void *x)
234{
3101faab 235 const_rtx const value = (const_rtx) x;
091a3ac7
CF
236 hashval_t h;
237
238 h = fixed_hash (CONST_FIXED_VALUE (value));
239 /* MODE is used in the comparison, so it should be in the hash. */
240 h ^= GET_MODE (value);
241 return h;
242}
243
244/* Returns nonzero if the value represented by X (really a ...)
245 is the same as that represented by Y (really a ...). */
246
247static int
248const_fixed_htab_eq (const void *x, const void *y)
249{
3101faab 250 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
091a3ac7
CF
251
252 if (GET_MODE (a) != GET_MODE (b))
253 return 0;
254 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
255}
256
173b24b9
RK
257/* Returns a hash code for X (which is a really a mem_attrs *). */
258
259static hashval_t
502b8322 260mem_attrs_htab_hash (const void *x)
173b24b9 261{
f7d504c2 262 const mem_attrs *const p = (const mem_attrs *) x;
173b24b9
RK
263
264 return (p->alias ^ (p->align * 1000)
09e881c9 265 ^ (p->addrspace * 4000)
754c3d5d
RS
266 ^ ((p->offset_known_p ? p->offset : 0) * 50000)
267 ^ ((p->size_known_p ? p->size : 0) * 2500000)
78b76d08 268 ^ (size_t) iterative_hash_expr (p->expr, 0));
173b24b9
RK
269}
270
f12144dd 271/* Return true if the given memory attributes are equal. */
c13e8210 272
f12144dd
RS
273static bool
274mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
c13e8210 275{
754c3d5d
RS
276 return (p->alias == q->alias
277 && p->offset_known_p == q->offset_known_p
278 && (!p->offset_known_p || p->offset == q->offset)
279 && p->size_known_p == q->size_known_p
280 && (!p->size_known_p || p->size == q->size)
281 && p->align == q->align
09e881c9 282 && p->addrspace == q->addrspace
78b76d08
SB
283 && (p->expr == q->expr
284 || (p->expr != NULL_TREE && q->expr != NULL_TREE
285 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
286}
287
f12144dd
RS
288/* Returns nonzero if the value represented by X (which is really a
289 mem_attrs *) is the same as that given by Y (which is also really a
290 mem_attrs *). */
173b24b9 291
f12144dd
RS
292static int
293mem_attrs_htab_eq (const void *x, const void *y)
173b24b9 294{
f12144dd
RS
295 return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
296}
173b24b9 297
f12144dd 298/* Set MEM's memory attributes so that they are the same as ATTRS. */
10b76d73 299
f12144dd
RS
300static void
301set_mem_attrs (rtx mem, mem_attrs *attrs)
302{
303 void **slot;
304
305 /* If everything is the default, we can just clear the attributes. */
306 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
307 {
308 MEM_ATTRS (mem) = 0;
309 return;
310 }
173b24b9 311
f12144dd 312 slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
173b24b9
RK
313 if (*slot == 0)
314 {
a9429e29 315 *slot = ggc_alloc_mem_attrs ();
f12144dd 316 memcpy (*slot, attrs, sizeof (mem_attrs));
173b24b9
RK
317 }
318
f12144dd 319 MEM_ATTRS (mem) = (mem_attrs *) *slot;
c13e8210
MM
320}
321
a560d4d4
JH
322/* Returns a hash code for X (which is a really a reg_attrs *). */
323
324static hashval_t
502b8322 325reg_attrs_htab_hash (const void *x)
a560d4d4 326{
741ac903 327 const reg_attrs *const p = (const reg_attrs *) x;
a560d4d4 328
9841210f 329 return ((p->offset * 1000) ^ (intptr_t) p->decl);
a560d4d4
JH
330}
331
6356f892 332/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
333 reg_attrs *) is the same as that given by Y (which is also really a
334 reg_attrs *). */
335
336static int
502b8322 337reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4 338{
741ac903
KG
339 const reg_attrs *const p = (const reg_attrs *) x;
340 const reg_attrs *const q = (const reg_attrs *) y;
a560d4d4
JH
341
342 return (p->decl == q->decl && p->offset == q->offset);
343}
344/* Allocate a new reg_attrs structure and insert it into the hash table if
345 one identical to it is not already in the table. We are doing this for
346 MEM of mode MODE. */
347
348static reg_attrs *
502b8322 349get_reg_attrs (tree decl, int offset)
a560d4d4
JH
350{
351 reg_attrs attrs;
352 void **slot;
353
354 /* If everything is the default, we can just return zero. */
355 if (decl == 0 && offset == 0)
356 return 0;
357
358 attrs.decl = decl;
359 attrs.offset = offset;
360
361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
362 if (*slot == 0)
363 {
a9429e29 364 *slot = ggc_alloc_reg_attrs ();
a560d4d4
JH
365 memcpy (*slot, &attrs, sizeof (reg_attrs));
366 }
367
1b4572a8 368 return (reg_attrs *) *slot;
a560d4d4
JH
369}
370
6fb5fa3c
DB
371
372#if !HAVE_blockage
373/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
374 across this insn. */
375
376rtx
377gen_blockage (void)
378{
379 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
380 MEM_VOLATILE_P (x) = true;
381 return x;
382}
383#endif
384
385
08394eef
BS
386/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
387 don't attempt to share with the various global pieces of rtl (such as
388 frame_pointer_rtx). */
389
390rtx
502b8322 391gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
392{
393 rtx x = gen_rtx_raw_REG (mode, regno);
394 ORIGINAL_REGNO (x) = regno;
395 return x;
396}
397
c5c76735
JL
398/* There are some RTL codes that require special attention; the generation
399 functions do the raw handling. If you add to this list, modify
400 special_rtx in gengenrtl.c as well. */
401
3b80f6ca 402rtx
502b8322 403gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca 404{
c13e8210
MM
405 void **slot;
406
3b80f6ca 407 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 408 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
409
410#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
411 if (const_true_rtx && arg == STORE_FLAG_VALUE)
412 return const_true_rtx;
413#endif
414
c13e8210 415 /* Look up the CONST_INT in the hash table. */
e38992e8
RK
416 slot = htab_find_slot_with_hash (const_int_htab, &arg,
417 (hashval_t) arg, INSERT);
29105cea 418 if (*slot == 0)
1f8f4a0b 419 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210
MM
420
421 return (rtx) *slot;
3b80f6ca
RH
422}
423
2496c7bd 424rtx
502b8322 425gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
426{
427 return GEN_INT (trunc_int_for_mode (c, mode));
428}
429
5692c7bc
ZW
430/* CONST_DOUBLEs might be created from pairs of integers, or from
431 REAL_VALUE_TYPEs. Also, their length is known only at run time,
432 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
433
434/* Determine whether REAL, a CONST_DOUBLE, already exists in the
435 hash table. If so, return its counterpart; otherwise add it
436 to the hash table and return it. */
437static rtx
502b8322 438lookup_const_double (rtx real)
5692c7bc
ZW
439{
440 void **slot = htab_find_slot (const_double_htab, real, INSERT);
441 if (*slot == 0)
442 *slot = real;
443
444 return (rtx) *slot;
445}
29105cea 446
5692c7bc
ZW
447/* Return a CONST_DOUBLE rtx for a floating-point value specified by
448 VALUE in mode MODE. */
0133b7d9 449rtx
502b8322 450const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 451{
5692c7bc
ZW
452 rtx real = rtx_alloc (CONST_DOUBLE);
453 PUT_MODE (real, mode);
454
9e254451 455 real->u.rv = value;
5692c7bc
ZW
456
457 return lookup_const_double (real);
458}
459
091a3ac7
CF
460/* Determine whether FIXED, a CONST_FIXED, already exists in the
461 hash table. If so, return its counterpart; otherwise add it
462 to the hash table and return it. */
463
464static rtx
465lookup_const_fixed (rtx fixed)
466{
467 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
468 if (*slot == 0)
469 *slot = fixed;
470
471 return (rtx) *slot;
472}
473
474/* Return a CONST_FIXED rtx for a fixed-point value specified by
475 VALUE in mode MODE. */
476
477rtx
478const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
479{
480 rtx fixed = rtx_alloc (CONST_FIXED);
481 PUT_MODE (fixed, mode);
482
483 fixed->u.fv = value;
484
485 return lookup_const_fixed (fixed);
486}
487
3e93ff81
AS
488/* Constructs double_int from rtx CST. */
489
490double_int
491rtx_to_double_int (const_rtx cst)
492{
493 double_int r;
494
495 if (CONST_INT_P (cst))
496 r = shwi_to_double_int (INTVAL (cst));
497 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
498 {
499 r.low = CONST_DOUBLE_LOW (cst);
500 r.high = CONST_DOUBLE_HIGH (cst);
501 }
502 else
503 gcc_unreachable ();
504
505 return r;
506}
507
508
54fb1ae0
AS
509/* Return a CONST_DOUBLE or CONST_INT for a value specified as
510 a double_int. */
511
512rtx
513immed_double_int_const (double_int i, enum machine_mode mode)
514{
515 return immed_double_const (i.low, i.high, mode);
516}
517
5692c7bc
ZW
518/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
519 of ints: I0 is the low-order word and I1 is the high-order word.
929e10f4
MS
520 For values that are larger than 2*HOST_BITS_PER_WIDE_INT, the
521 implied upper bits are copies of the high bit of i1. The value
522 itself is neither signed nor unsigned. Do not use this routine for
523 non-integer modes; convert to REAL_VALUE_TYPE and use
524 CONST_DOUBLE_FROM_REAL_VALUE. */
5692c7bc
ZW
525
526rtx
502b8322 527immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
5692c7bc
ZW
528{
529 rtx value;
530 unsigned int i;
531
65acccdd
ZD
532 /* There are the following cases (note that there are no modes with
533 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
534
535 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
536 gen_int_mode.
929e10f4
MS
537 2) If the value of the integer fits into HOST_WIDE_INT anyway
538 (i.e., i1 consists only from copies of the sign bit, and sign
539 of i0 and i1 are the same), then we return a CONST_INT for i0.
65acccdd 540 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
5692c7bc
ZW
541 if (mode != VOIDmode)
542 {
5b0264cb
NS
543 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
544 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
545 /* We can get a 0 for an error mark. */
546 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
547 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
5692c7bc 548
65acccdd
ZD
549 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
550 return gen_int_mode (i0, mode);
5692c7bc
ZW
551 }
552
553 /* If this integer fits in one word, return a CONST_INT. */
554 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
555 return GEN_INT (i0);
556
557 /* We use VOIDmode for integers. */
558 value = rtx_alloc (CONST_DOUBLE);
559 PUT_MODE (value, VOIDmode);
560
561 CONST_DOUBLE_LOW (value) = i0;
562 CONST_DOUBLE_HIGH (value) = i1;
563
564 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
565 XWINT (value, i) = 0;
566
567 return lookup_const_double (value);
0133b7d9
RH
568}
569
3b80f6ca 570rtx
502b8322 571gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3b80f6ca
RH
572{
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
578
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
583
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
586
587 if (mode == Pmode && !reload_in_progress)
588 {
e10c79fe
LB
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
3b80f6ca 591 return frame_pointer_rtx;
e3339d0f 592#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
e10c79fe
LB
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
595 return hard_frame_pointer_rtx;
596#endif
e3339d0f 597#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
bcb33994 598 if (regno == ARG_POINTER_REGNUM)
3b80f6ca
RH
599 return arg_pointer_rtx;
600#endif
601#ifdef RETURN_ADDRESS_POINTER_REGNUM
bcb33994 602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3b80f6ca
RH
603 return return_address_pointer_rtx;
604#endif
fc555370 605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
bf9412cd 606 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2d67bd7b 607 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
68252e27 608 return pic_offset_table_rtx;
bcb33994 609 if (regno == STACK_POINTER_REGNUM)
3b80f6ca
RH
610 return stack_pointer_rtx;
611 }
612
006a94b0 613#if 0
6cde4876 614 /* If the per-function register table has been set up, try to re-use
006a94b0
JL
615 an existing entry in that table to avoid useless generation of RTL.
616
617 This code is disabled for now until we can fix the various backends
618 which depend on having non-shared hard registers in some cases. Long
619 term we want to re-enable this code as it can significantly cut down
e10c79fe
LB
620 on the amount of useless RTL that gets generated.
621
622 We'll also need to fix some code that runs after reload that wants to
623 set ORIGINAL_REGNO. */
624
6cde4876
JL
625 if (cfun
626 && cfun->emit
627 && regno_reg_rtx
628 && regno < FIRST_PSEUDO_REGISTER
629 && reg_raw_mode[regno] == mode)
630 return regno_reg_rtx[regno];
006a94b0 631#endif
6cde4876 632
08394eef 633 return gen_raw_REG (mode, regno);
3b80f6ca
RH
634}
635
41472af8 636rtx
502b8322 637gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
638{
639 rtx rt = gen_rtx_raw_MEM (mode, addr);
640
641 /* This field is not cleared by the mere allocation of the rtx, so
642 we clear it here. */
173b24b9 643 MEM_ATTRS (rt) = 0;
41472af8
MM
644
645 return rt;
646}
ddef6bc7 647
542a8afa
RH
648/* Generate a memory referring to non-trapping constant memory. */
649
650rtx
651gen_const_mem (enum machine_mode mode, rtx addr)
652{
653 rtx mem = gen_rtx_MEM (mode, addr);
654 MEM_READONLY_P (mem) = 1;
655 MEM_NOTRAP_P (mem) = 1;
656 return mem;
657}
658
bf877a76
R
659/* Generate a MEM referring to fixed portions of the frame, e.g., register
660 save areas. */
661
662rtx
663gen_frame_mem (enum machine_mode mode, rtx addr)
664{
665 rtx mem = gen_rtx_MEM (mode, addr);
666 MEM_NOTRAP_P (mem) = 1;
667 set_mem_alias_set (mem, get_frame_alias_set ());
668 return mem;
669}
670
671/* Generate a MEM referring to a temporary use of the stack, not part
672 of the fixed stack frame. For example, something which is pushed
673 by a target splitter. */
674rtx
675gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
676{
677 rtx mem = gen_rtx_MEM (mode, addr);
678 MEM_NOTRAP_P (mem) = 1;
e3b5732b 679 if (!cfun->calls_alloca)
bf877a76
R
680 set_mem_alias_set (mem, get_frame_alias_set ());
681 return mem;
682}
683
beb72684
RH
684/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
685 this construct would be valid, and false otherwise. */
686
687bool
688validate_subreg (enum machine_mode omode, enum machine_mode imode,
ed7a4b4b 689 const_rtx reg, unsigned int offset)
ddef6bc7 690{
beb72684
RH
691 unsigned int isize = GET_MODE_SIZE (imode);
692 unsigned int osize = GET_MODE_SIZE (omode);
693
694 /* All subregs must be aligned. */
695 if (offset % osize != 0)
696 return false;
697
698 /* The subreg offset cannot be outside the inner object. */
699 if (offset >= isize)
700 return false;
701
702 /* ??? This should not be here. Temporarily continue to allow word_mode
703 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
704 Generally, backends are doing something sketchy but it'll take time to
705 fix them all. */
706 if (omode == word_mode)
707 ;
708 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
709 is the culprit here, and not the backends. */
710 else if (osize >= UNITS_PER_WORD && isize >= osize)
711 ;
712 /* Allow component subregs of complex and vector. Though given the below
713 extraction rules, it's not always clear what that means. */
714 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
715 && GET_MODE_INNER (imode) == omode)
716 ;
717 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
718 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
719 represent this. It's questionable if this ought to be represented at
720 all -- why can't this all be hidden in post-reload splitters that make
721 arbitrarily mode changes to the registers themselves. */
722 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
723 ;
724 /* Subregs involving floating point modes are not allowed to
725 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
726 (subreg:SI (reg:DF) 0) isn't. */
727 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
728 {
729 if (isize != osize)
730 return false;
731 }
ddef6bc7 732
beb72684
RH
733 /* Paradoxical subregs must have offset zero. */
734 if (osize > isize)
735 return offset == 0;
736
737 /* This is a normal subreg. Verify that the offset is representable. */
738
739 /* For hard registers, we already have most of these rules collected in
740 subreg_offset_representable_p. */
741 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
742 {
743 unsigned int regno = REGNO (reg);
744
745#ifdef CANNOT_CHANGE_MODE_CLASS
746 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
747 && GET_MODE_INNER (imode) == omode)
748 ;
749 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
750 return false;
ddef6bc7 751#endif
beb72684
RH
752
753 return subreg_offset_representable_p (regno, imode, offset, omode);
754 }
755
756 /* For pseudo registers, we want most of the same checks. Namely:
757 If the register no larger than a word, the subreg must be lowpart.
758 If the register is larger than a word, the subreg must be the lowpart
759 of a subword. A subreg does *not* perform arbitrary bit extraction.
760 Given that we've already checked mode/offset alignment, we only have
761 to check subword subregs here. */
762 if (osize < UNITS_PER_WORD)
763 {
764 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
765 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
766 if (offset % UNITS_PER_WORD != low_off)
767 return false;
768 }
769 return true;
770}
771
772rtx
773gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
774{
775 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 776 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
777}
778
173b24b9
RK
779/* Generate a SUBREG representing the least-significant part of REG if MODE
780 is smaller than mode of REG, otherwise paradoxical SUBREG. */
781
ddef6bc7 782rtx
502b8322 783gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
784{
785 enum machine_mode inmode;
ddef6bc7
JJ
786
787 inmode = GET_MODE (reg);
788 if (inmode == VOIDmode)
789 inmode = mode;
e0e08ac2
JH
790 return gen_rtx_SUBREG (mode, reg,
791 subreg_lowpart_offset (mode, inmode));
ddef6bc7 792}
c5c76735 793\f
23b2ce53 794
80379f51
PB
795/* Create an rtvec and stores within it the RTXen passed in the arguments. */
796
23b2ce53 797rtvec
e34d07f2 798gen_rtvec (int n, ...)
23b2ce53 799{
80379f51
PB
800 int i;
801 rtvec rt_val;
e34d07f2 802 va_list p;
23b2ce53 803
e34d07f2 804 va_start (p, n);
23b2ce53 805
80379f51 806 /* Don't allocate an empty rtvec... */
23b2ce53 807 if (n == 0)
0edf1bb2
JL
808 {
809 va_end (p);
810 return NULL_RTVEC;
811 }
23b2ce53 812
80379f51 813 rt_val = rtvec_alloc (n);
4f90e4a0 814
23b2ce53 815 for (i = 0; i < n; i++)
80379f51 816 rt_val->elem[i] = va_arg (p, rtx);
6268b922 817
e34d07f2 818 va_end (p);
80379f51 819 return rt_val;
23b2ce53
RS
820}
821
822rtvec
502b8322 823gen_rtvec_v (int n, rtx *argp)
23b2ce53 824{
b3694847
SS
825 int i;
826 rtvec rt_val;
23b2ce53 827
80379f51 828 /* Don't allocate an empty rtvec... */
23b2ce53 829 if (n == 0)
80379f51 830 return NULL_RTVEC;
23b2ce53 831
80379f51 832 rt_val = rtvec_alloc (n);
23b2ce53
RS
833
834 for (i = 0; i < n; i++)
8f985ec4 835 rt_val->elem[i] = *argp++;
23b2ce53
RS
836
837 return rt_val;
838}
839\f
38ae7651
RS
840/* Return the number of bytes between the start of an OUTER_MODE
841 in-memory value and the start of an INNER_MODE in-memory value,
842 given that the former is a lowpart of the latter. It may be a
843 paradoxical lowpart, in which case the offset will be negative
844 on big-endian targets. */
845
846int
847byte_lowpart_offset (enum machine_mode outer_mode,
848 enum machine_mode inner_mode)
849{
850 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
851 return subreg_lowpart_offset (outer_mode, inner_mode);
852 else
853 return -subreg_lowpart_offset (inner_mode, outer_mode);
854}
855\f
23b2ce53
RS
856/* Generate a REG rtx for a new pseudo register of mode MODE.
857 This pseudo is assigned the next sequential register number. */
858
859rtx
502b8322 860gen_reg_rtx (enum machine_mode mode)
23b2ce53 861{
b3694847 862 rtx val;
2e3f842f 863 unsigned int align = GET_MODE_ALIGNMENT (mode);
23b2ce53 864
f8335a4f 865 gcc_assert (can_create_pseudo_p ());
23b2ce53 866
2e3f842f
L
867 /* If a virtual register with bigger mode alignment is generated,
868 increase stack alignment estimation because it might be spilled
869 to stack later. */
b8698a0f 870 if (SUPPORTS_STACK_ALIGNMENT
2e3f842f
L
871 && crtl->stack_alignment_estimated < align
872 && !crtl->stack_realign_processed)
ae58e548
JJ
873 {
874 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
875 if (crtl->stack_alignment_estimated < min_align)
876 crtl->stack_alignment_estimated = min_align;
877 }
2e3f842f 878
1b3d8f8a
GK
879 if (generating_concat_p
880 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
881 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
fc84e8a8
RS
882 {
883 /* For complex modes, don't make a single pseudo.
884 Instead, make a CONCAT of two pseudos.
885 This allows noncontiguous allocation of the real and imaginary parts,
886 which makes much better code. Besides, allocating DCmode
887 pseudos overstrains reload on some machines like the 386. */
888 rtx realpart, imagpart;
27e58a70 889 enum machine_mode partmode = GET_MODE_INNER (mode);
fc84e8a8
RS
890
891 realpart = gen_reg_rtx (partmode);
892 imagpart = gen_reg_rtx (partmode);
3b80f6ca 893 return gen_rtx_CONCAT (mode, realpart, imagpart);
fc84e8a8
RS
894 }
895
a560d4d4 896 /* Make sure regno_pointer_align, and regno_reg_rtx are large
0d4903b8 897 enough to have an element for this pseudo reg number. */
23b2ce53 898
3e029763 899 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
23b2ce53 900 {
3e029763 901 int old_size = crtl->emit.regno_pointer_align_length;
60564289 902 char *tmp;
0d4903b8 903 rtx *new1;
0d4903b8 904
60564289
KG
905 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
906 memset (tmp + old_size, 0, old_size);
907 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
49ad7cfa 908
1b4572a8 909 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
49ad7cfa 910 memset (new1 + old_size, 0, old_size * sizeof (rtx));
23b2ce53
RS
911 regno_reg_rtx = new1;
912
3e029763 913 crtl->emit.regno_pointer_align_length = old_size * 2;
23b2ce53
RS
914 }
915
08394eef 916 val = gen_raw_REG (mode, reg_rtx_no);
23b2ce53
RS
917 regno_reg_rtx[reg_rtx_no++] = val;
918 return val;
919}
920
38ae7651
RS
921/* Update NEW with the same attributes as REG, but with OFFSET added
922 to the REG_OFFSET. */
a560d4d4 923
e53a16e7 924static void
60564289 925update_reg_offset (rtx new_rtx, rtx reg, int offset)
a560d4d4 926{
60564289 927 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
502b8322 928 REG_OFFSET (reg) + offset);
e53a16e7
ILT
929}
930
38ae7651
RS
931/* Generate a register with same attributes as REG, but with OFFSET
932 added to the REG_OFFSET. */
e53a16e7
ILT
933
934rtx
935gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
936 int offset)
937{
60564289 938 rtx new_rtx = gen_rtx_REG (mode, regno);
e53a16e7 939
60564289
KG
940 update_reg_offset (new_rtx, reg, offset);
941 return new_rtx;
e53a16e7
ILT
942}
943
944/* Generate a new pseudo-register with the same attributes as REG, but
38ae7651 945 with OFFSET added to the REG_OFFSET. */
e53a16e7
ILT
946
947rtx
948gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
949{
60564289 950 rtx new_rtx = gen_reg_rtx (mode);
e53a16e7 951
60564289
KG
952 update_reg_offset (new_rtx, reg, offset);
953 return new_rtx;
a560d4d4
JH
954}
955
38ae7651
RS
956/* Adjust REG in-place so that it has mode MODE. It is assumed that the
957 new register is a (possibly paradoxical) lowpart of the old one. */
a560d4d4
JH
958
959void
38ae7651 960adjust_reg_mode (rtx reg, enum machine_mode mode)
a560d4d4 961{
38ae7651
RS
962 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
963 PUT_MODE (reg, mode);
964}
965
966/* Copy REG's attributes from X, if X has any attributes. If REG and X
967 have different modes, REG is a (possibly paradoxical) lowpart of X. */
968
969void
970set_reg_attrs_from_value (rtx reg, rtx x)
971{
972 int offset;
de6f3f7a
L
973 bool can_be_reg_pointer = true;
974
975 /* Don't call mark_reg_pointer for incompatible pointer sign
976 extension. */
977 while (GET_CODE (x) == SIGN_EXTEND
978 || GET_CODE (x) == ZERO_EXTEND
979 || GET_CODE (x) == TRUNCATE
980 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
981 {
982#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
983 if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
984 || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
985 can_be_reg_pointer = false;
986#endif
987 x = XEXP (x, 0);
988 }
38ae7651 989
923ba36f
JJ
990 /* Hard registers can be reused for multiple purposes within the same
991 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
992 on them is wrong. */
993 if (HARD_REGISTER_P (reg))
994 return;
995
38ae7651 996 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
46b71b03
PB
997 if (MEM_P (x))
998 {
527210c4
RS
999 if (MEM_OFFSET_KNOWN_P (x))
1000 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1001 MEM_OFFSET (x) + offset);
de6f3f7a 1002 if (can_be_reg_pointer && MEM_POINTER (x))
0a317111 1003 mark_reg_pointer (reg, 0);
46b71b03
PB
1004 }
1005 else if (REG_P (x))
1006 {
1007 if (REG_ATTRS (x))
1008 update_reg_offset (reg, x, offset);
de6f3f7a 1009 if (can_be_reg_pointer && REG_POINTER (x))
46b71b03
PB
1010 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1011 }
1012}
1013
1014/* Generate a REG rtx for a new pseudo register, copying the mode
1015 and attributes from X. */
1016
1017rtx
1018gen_reg_rtx_and_attrs (rtx x)
1019{
1020 rtx reg = gen_reg_rtx (GET_MODE (x));
1021 set_reg_attrs_from_value (reg, x);
1022 return reg;
a560d4d4
JH
1023}
1024
9d18e06b
JZ
1025/* Set the register attributes for registers contained in PARM_RTX.
1026 Use needed values from memory attributes of MEM. */
1027
1028void
502b8322 1029set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
9d18e06b 1030{
f8cfc6aa 1031 if (REG_P (parm_rtx))
38ae7651 1032 set_reg_attrs_from_value (parm_rtx, mem);
9d18e06b
JZ
1033 else if (GET_CODE (parm_rtx) == PARALLEL)
1034 {
1035 /* Check for a NULL entry in the first slot, used to indicate that the
1036 parameter goes both on the stack and in registers. */
1037 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1038 for (; i < XVECLEN (parm_rtx, 0); i++)
1039 {
1040 rtx x = XVECEXP (parm_rtx, 0, i);
f8cfc6aa 1041 if (REG_P (XEXP (x, 0)))
9d18e06b
JZ
1042 REG_ATTRS (XEXP (x, 0))
1043 = get_reg_attrs (MEM_EXPR (mem),
1044 INTVAL (XEXP (x, 1)));
1045 }
1046 }
1047}
1048
38ae7651
RS
1049/* Set the REG_ATTRS for registers in value X, given that X represents
1050 decl T. */
a560d4d4 1051
4e3825db 1052void
38ae7651
RS
1053set_reg_attrs_for_decl_rtl (tree t, rtx x)
1054{
1055 if (GET_CODE (x) == SUBREG)
fbe6ec81 1056 {
38ae7651
RS
1057 gcc_assert (subreg_lowpart_p (x));
1058 x = SUBREG_REG (x);
fbe6ec81 1059 }
f8cfc6aa 1060 if (REG_P (x))
38ae7651
RS
1061 REG_ATTRS (x)
1062 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
726612d2 1063 DECL_MODE (t)));
a560d4d4
JH
1064 if (GET_CODE (x) == CONCAT)
1065 {
1066 if (REG_P (XEXP (x, 0)))
1067 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1068 if (REG_P (XEXP (x, 1)))
1069 REG_ATTRS (XEXP (x, 1))
1070 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1071 }
1072 if (GET_CODE (x) == PARALLEL)
1073 {
d4afac5b
JZ
1074 int i, start;
1075
1076 /* Check for a NULL entry, used to indicate that the parameter goes
1077 both on the stack and in registers. */
1078 if (XEXP (XVECEXP (x, 0, 0), 0))
1079 start = 0;
1080 else
1081 start = 1;
1082
1083 for (i = start; i < XVECLEN (x, 0); i++)
a560d4d4
JH
1084 {
1085 rtx y = XVECEXP (x, 0, i);
1086 if (REG_P (XEXP (y, 0)))
1087 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1088 }
1089 }
1090}
1091
38ae7651
RS
1092/* Assign the RTX X to declaration T. */
1093
1094void
1095set_decl_rtl (tree t, rtx x)
1096{
1097 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1098 if (x)
1099 set_reg_attrs_for_decl_rtl (t, x);
1100}
1101
5141868d
RS
1102/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1103 if the ABI requires the parameter to be passed by reference. */
38ae7651
RS
1104
1105void
5141868d 1106set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
38ae7651
RS
1107{
1108 DECL_INCOMING_RTL (t) = x;
5141868d 1109 if (x && !by_reference_p)
38ae7651
RS
1110 set_reg_attrs_for_decl_rtl (t, x);
1111}
1112
754fdcca
RK
1113/* Identify REG (which may be a CONCAT) as a user register. */
1114
1115void
502b8322 1116mark_user_reg (rtx reg)
754fdcca
RK
1117{
1118 if (GET_CODE (reg) == CONCAT)
1119 {
1120 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1121 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1122 }
754fdcca 1123 else
5b0264cb
NS
1124 {
1125 gcc_assert (REG_P (reg));
1126 REG_USERVAR_P (reg) = 1;
1127 }
754fdcca
RK
1128}
1129
86fe05e0
RK
1130/* Identify REG as a probable pointer register and show its alignment
1131 as ALIGN, if nonzero. */
23b2ce53
RS
1132
1133void
502b8322 1134mark_reg_pointer (rtx reg, int align)
23b2ce53 1135{
3502dc9c 1136 if (! REG_POINTER (reg))
00995e78 1137 {
3502dc9c 1138 REG_POINTER (reg) = 1;
86fe05e0 1139
00995e78
RE
1140 if (align)
1141 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1142 }
1143 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 1144 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 1145 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
1146}
1147
1148/* Return 1 plus largest pseudo reg number used in the current function. */
1149
1150int
502b8322 1151max_reg_num (void)
23b2ce53
RS
1152{
1153 return reg_rtx_no;
1154}
1155
1156/* Return 1 + the largest label number used so far in the current function. */
1157
1158int
502b8322 1159max_label_num (void)
23b2ce53 1160{
23b2ce53
RS
1161 return label_num;
1162}
1163
1164/* Return first label number used in this function (if any were used). */
1165
1166int
502b8322 1167get_first_label_num (void)
23b2ce53
RS
1168{
1169 return first_label_num;
1170}
6de9cd9a
DN
1171
1172/* If the rtx for label was created during the expansion of a nested
1173 function, then first_label_num won't include this label number.
fa10beec 1174 Fix this now so that array indices work later. */
6de9cd9a
DN
1175
1176void
1177maybe_set_first_label_num (rtx x)
1178{
1179 if (CODE_LABEL_NUMBER (x) < first_label_num)
1180 first_label_num = CODE_LABEL_NUMBER (x);
1181}
23b2ce53
RS
1182\f
1183/* Return a value representing some low-order bits of X, where the number
1184 of low-order bits is given by MODE. Note that no conversion is done
750c9258 1185 between floating-point and fixed-point values, rather, the bit
23b2ce53
RS
1186 representation is returned.
1187
1188 This function handles the cases in common between gen_lowpart, below,
1189 and two variants in cse.c and combine.c. These are the cases that can
1190 be safely handled at all points in the compilation.
1191
1192 If this is not a case we can handle, return 0. */
1193
1194rtx
502b8322 1195gen_lowpart_common (enum machine_mode mode, rtx x)
23b2ce53 1196{
ddef6bc7 1197 int msize = GET_MODE_SIZE (mode);
550d1387 1198 int xsize;
ddef6bc7 1199 int offset = 0;
550d1387
GK
1200 enum machine_mode innermode;
1201
1202 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1203 so we have to make one up. Yuk. */
1204 innermode = GET_MODE (x);
481683e1 1205 if (CONST_INT_P (x)
db487452 1206 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
550d1387
GK
1207 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1208 else if (innermode == VOIDmode)
1209 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
b8698a0f 1210
550d1387
GK
1211 xsize = GET_MODE_SIZE (innermode);
1212
5b0264cb 1213 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
23b2ce53 1214
550d1387 1215 if (innermode == mode)
23b2ce53
RS
1216 return x;
1217
1218 /* MODE must occupy no more words than the mode of X. */
550d1387
GK
1219 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1220 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
23b2ce53
RS
1221 return 0;
1222
53501a19 1223 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
3d8bf70f 1224 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
53501a19
BS
1225 return 0;
1226
550d1387 1227 offset = subreg_lowpart_offset (mode, innermode);
23b2ce53
RS
1228
1229 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
83e9c679
RK
1230 && (GET_MODE_CLASS (mode) == MODE_INT
1231 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
23b2ce53
RS
1232 {
1233 /* If we are getting the low-order part of something that has been
1234 sign- or zero-extended, we can either just use the object being
1235 extended or make a narrower extension. If we want an even smaller
1236 piece than the size of the object being extended, call ourselves
1237 recursively.
1238
1239 This case is used mostly by combine and cse. */
1240
1241 if (GET_MODE (XEXP (x, 0)) == mode)
1242 return XEXP (x, 0);
550d1387 1243 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
23b2ce53 1244 return gen_lowpart_common (mode, XEXP (x, 0));
550d1387 1245 else if (msize < xsize)
3b80f6ca 1246 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
23b2ce53 1247 }
f8cfc6aa 1248 else if (GET_CODE (x) == SUBREG || REG_P (x)
550d1387 1249 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
481683e1 1250 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
550d1387 1251 return simplify_gen_subreg (mode, x, innermode, offset);
8aada4ad 1252
23b2ce53
RS
1253 /* Otherwise, we can't do this. */
1254 return 0;
1255}
1256\f
ccba022b 1257rtx
502b8322 1258gen_highpart (enum machine_mode mode, rtx x)
ccba022b 1259{
ddef6bc7 1260 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1261 rtx result;
ddef6bc7 1262
ccba022b
RS
1263 /* This case loses if X is a subreg. To catch bugs early,
1264 complain if an invalid MODE is used even in other cases. */
5b0264cb
NS
1265 gcc_assert (msize <= UNITS_PER_WORD
1266 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
ddef6bc7 1267
e0e08ac2
JH
1268 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1269 subreg_highpart_offset (mode, GET_MODE (x)));
5b0264cb 1270 gcc_assert (result);
b8698a0f 1271
09482e0d
JW
1272 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1273 the target if we have a MEM. gen_highpart must return a valid operand,
1274 emitting code if necessary to do so. */
5b0264cb
NS
1275 if (MEM_P (result))
1276 {
1277 result = validize_mem (result);
1278 gcc_assert (result);
1279 }
b8698a0f 1280
e0e08ac2
JH
1281 return result;
1282}
5222e470 1283
26d249eb 1284/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1285 be VOIDmode constant. */
1286rtx
502b8322 1287gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1288{
1289 if (GET_MODE (exp) != VOIDmode)
1290 {
5b0264cb 1291 gcc_assert (GET_MODE (exp) == innermode);
5222e470
JH
1292 return gen_highpart (outermode, exp);
1293 }
1294 return simplify_gen_subreg (outermode, exp, innermode,
1295 subreg_highpart_offset (outermode, innermode));
1296}
68252e27 1297
38ae7651 1298/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
8698cce3 1299
e0e08ac2 1300unsigned int
502b8322 1301subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1302{
1303 unsigned int offset = 0;
1304 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1305
e0e08ac2 1306 if (difference > 0)
ccba022b 1307 {
e0e08ac2
JH
1308 if (WORDS_BIG_ENDIAN)
1309 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1310 if (BYTES_BIG_ENDIAN)
1311 offset += difference % UNITS_PER_WORD;
ccba022b 1312 }
ddef6bc7 1313
e0e08ac2 1314 return offset;
ccba022b 1315}
eea50aa0 1316
e0e08ac2
JH
1317/* Return offset in bytes to get OUTERMODE high part
1318 of the value in mode INNERMODE stored in memory in target format. */
1319unsigned int
502b8322 1320subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1321{
1322 unsigned int offset = 0;
1323 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1324
5b0264cb 1325 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
e0e08ac2 1326
eea50aa0
JH
1327 if (difference > 0)
1328 {
e0e08ac2 1329 if (! WORDS_BIG_ENDIAN)
eea50aa0 1330 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1331 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1332 offset += difference % UNITS_PER_WORD;
1333 }
1334
e0e08ac2 1335 return offset;
eea50aa0 1336}
ccba022b 1337
23b2ce53
RS
1338/* Return 1 iff X, assumed to be a SUBREG,
1339 refers to the least significant part of its containing reg.
1340 If X is not a SUBREG, always return 1 (it is its own low part!). */
1341
1342int
fa233e34 1343subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1344{
1345 if (GET_CODE (x) != SUBREG)
1346 return 1;
a3a03040
RK
1347 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1348 return 0;
23b2ce53 1349
e0e08ac2
JH
1350 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1351 == SUBREG_BYTE (x));
23b2ce53 1352}
6a4bdc79
BS
1353
1354/* Return true if X is a paradoxical subreg, false otherwise. */
1355bool
1356paradoxical_subreg_p (const_rtx x)
1357{
1358 if (GET_CODE (x) != SUBREG)
1359 return false;
1360 return (GET_MODE_PRECISION (GET_MODE (x))
1361 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1362}
23b2ce53 1363\f
ddef6bc7
JJ
1364/* Return subword OFFSET of operand OP.
1365 The word number, OFFSET, is interpreted as the word number starting
1366 at the low-order address. OFFSET 0 is the low-order word if not
1367 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1368
1369 If we cannot extract the required word, we return zero. Otherwise,
1370 an rtx corresponding to the requested word will be returned.
1371
1372 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1373 reload has completed, a valid address will always be returned. After
1374 reload, if a valid address cannot be returned, we return zero.
1375
1376 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1377 it is the responsibility of the caller.
1378
1379 MODE is the mode of OP in case it is a CONST_INT.
1380
1381 ??? This is still rather broken for some cases. The problem for the
1382 moment is that all callers of this thing provide no 'goal mode' to
1383 tell us to work with. This exists because all callers were written
0631e0bf
JH
1384 in a word based SUBREG world.
1385 Now use of this function can be deprecated by simplify_subreg in most
1386 cases.
1387 */
ddef6bc7
JJ
1388
1389rtx
502b8322 1390operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
ddef6bc7
JJ
1391{
1392 if (mode == VOIDmode)
1393 mode = GET_MODE (op);
1394
5b0264cb 1395 gcc_assert (mode != VOIDmode);
ddef6bc7 1396
30f7a378 1397 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1398 if (mode != BLKmode
1399 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1400 return 0;
1401
30f7a378 1402 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1403 if (mode != BLKmode
1404 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1405 return const0_rtx;
1406
ddef6bc7 1407 /* Form a new MEM at the requested address. */
3c0cb5de 1408 if (MEM_P (op))
ddef6bc7 1409 {
60564289 1410 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1411
f1ec5147 1412 if (! validate_address)
60564289 1413 return new_rtx;
f1ec5147
RK
1414
1415 else if (reload_completed)
ddef6bc7 1416 {
09e881c9
BE
1417 if (! strict_memory_address_addr_space_p (word_mode,
1418 XEXP (new_rtx, 0),
1419 MEM_ADDR_SPACE (op)))
f1ec5147 1420 return 0;
ddef6bc7 1421 }
f1ec5147 1422 else
60564289 1423 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
ddef6bc7
JJ
1424 }
1425
0631e0bf
JH
1426 /* Rest can be handled by simplify_subreg. */
1427 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1428}
1429
535a42b1
NS
1430/* Similar to `operand_subword', but never return 0. If we can't
1431 extract the required subword, put OP into a register and try again.
1432 The second attempt must succeed. We always validate the address in
1433 this case.
23b2ce53
RS
1434
1435 MODE is the mode of OP, in case it is CONST_INT. */
1436
1437rtx
502b8322 1438operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1439{
ddef6bc7 1440 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1441
1442 if (result)
1443 return result;
1444
1445 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1446 {
1447 /* If this is a register which can not be accessed by words, copy it
1448 to a pseudo register. */
f8cfc6aa 1449 if (REG_P (op))
77e6b0eb
JC
1450 op = copy_to_reg (op);
1451 else
1452 op = force_reg (mode, op);
1453 }
23b2ce53 1454
ddef6bc7 1455 result = operand_subword (op, offset, 1, mode);
5b0264cb 1456 gcc_assert (result);
23b2ce53
RS
1457
1458 return result;
1459}
1460\f
2b3493c8
AK
1461/* Returns 1 if both MEM_EXPR can be considered equal
1462 and 0 otherwise. */
1463
1464int
4f588890 1465mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1466{
1467 if (expr1 == expr2)
1468 return 1;
1469
1470 if (! expr1 || ! expr2)
1471 return 0;
1472
1473 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1474 return 0;
1475
55b34b5f 1476 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1477}
1478
805903b5
JJ
1479/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1480 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1481 -1 if not known. */
1482
1483int
d9223014 1484get_mem_align_offset (rtx mem, unsigned int align)
805903b5
JJ
1485{
1486 tree expr;
1487 unsigned HOST_WIDE_INT offset;
1488
1489 /* This function can't use
527210c4 1490 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
e80c2726 1491 || (MAX (MEM_ALIGN (mem),
0eb77834 1492 MAX (align, get_object_alignment (MEM_EXPR (mem))))
805903b5
JJ
1493 < align))
1494 return -1;
1495 else
527210c4 1496 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
805903b5
JJ
1497 for two reasons:
1498 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1499 for <variable>. get_inner_reference doesn't handle it and
1500 even if it did, the alignment in that case needs to be determined
1501 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1502 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1503 isn't sufficiently aligned, the object it is in might be. */
1504 gcc_assert (MEM_P (mem));
1505 expr = MEM_EXPR (mem);
527210c4 1506 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
805903b5
JJ
1507 return -1;
1508
527210c4 1509 offset = MEM_OFFSET (mem);
805903b5
JJ
1510 if (DECL_P (expr))
1511 {
1512 if (DECL_ALIGN (expr) < align)
1513 return -1;
1514 }
1515 else if (INDIRECT_REF_P (expr))
1516 {
1517 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1518 return -1;
1519 }
1520 else if (TREE_CODE (expr) == COMPONENT_REF)
1521 {
1522 while (1)
1523 {
1524 tree inner = TREE_OPERAND (expr, 0);
1525 tree field = TREE_OPERAND (expr, 1);
1526 tree byte_offset = component_ref_field_offset (expr);
1527 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1528
1529 if (!byte_offset
1530 || !host_integerp (byte_offset, 1)
1531 || !host_integerp (bit_offset, 1))
1532 return -1;
1533
1534 offset += tree_low_cst (byte_offset, 1);
1535 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1536
1537 if (inner == NULL_TREE)
1538 {
1539 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1540 < (unsigned int) align)
1541 return -1;
1542 break;
1543 }
1544 else if (DECL_P (inner))
1545 {
1546 if (DECL_ALIGN (inner) < align)
1547 return -1;
1548 break;
1549 }
1550 else if (TREE_CODE (inner) != COMPONENT_REF)
1551 return -1;
1552 expr = inner;
1553 }
1554 }
1555 else
1556 return -1;
1557
1558 return offset & ((align / BITS_PER_UNIT) - 1);
1559}
1560
6926c713 1561/* Given REF (a MEM) and T, either the type of X or the expression
173b24b9 1562 corresponding to REF, set the memory attributes. OBJECTP is nonzero
6f1087be
RH
1563 if we are making a new object of this type. BITPOS is nonzero if
1564 there is an offset outstanding on T that will be applied later. */
173b24b9
RK
1565
1566void
502b8322
AJ
1567set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1568 HOST_WIDE_INT bitpos)
173b24b9 1569{
6f1087be 1570 HOST_WIDE_INT apply_bitpos = 0;
173b24b9 1571 tree type;
f12144dd 1572 struct mem_attrs attrs, *defattrs, *refattrs;
f18a7b25 1573 addr_space_t as;
173b24b9
RK
1574
1575 /* It can happen that type_for_mode was given a mode for which there
1576 is no language-level type. In which case it returns NULL, which
1577 we can see here. */
1578 if (t == NULL_TREE)
1579 return;
1580
1581 type = TYPE_P (t) ? t : TREE_TYPE (t);
eeb23c11
MM
1582 if (type == error_mark_node)
1583 return;
173b24b9 1584
173b24b9
RK
1585 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1586 wrong answer, as it assumes that DECL_RTL already has the right alias
1587 info. Callers should not set DECL_RTL until after the call to
1588 set_mem_attributes. */
5b0264cb 1589 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
173b24b9 1590
f12144dd
RS
1591 memset (&attrs, 0, sizeof (attrs));
1592
738cc472 1593 /* Get the alias set from the expression or type (perhaps using a
8ac61af7 1594 front-end routine) and use it. */
f12144dd 1595 attrs.alias = get_alias_set (t);
173b24b9 1596
a5e9c810 1597 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
f8ad8d7c 1598 MEM_POINTER (ref) = POINTER_TYPE_P (type);
173b24b9 1599
268f7033 1600 /* Default values from pre-existing memory attributes if present. */
f12144dd
RS
1601 refattrs = MEM_ATTRS (ref);
1602 if (refattrs)
268f7033
UW
1603 {
1604 /* ??? Can this ever happen? Calling this routine on a MEM that
1605 already carries memory attributes should probably be invalid. */
f12144dd 1606 attrs.expr = refattrs->expr;
754c3d5d 1607 attrs.offset_known_p = refattrs->offset_known_p;
f12144dd 1608 attrs.offset = refattrs->offset;
754c3d5d 1609 attrs.size_known_p = refattrs->size_known_p;
f12144dd
RS
1610 attrs.size = refattrs->size;
1611 attrs.align = refattrs->align;
268f7033
UW
1612 }
1613
1614 /* Otherwise, default values from the mode of the MEM reference. */
f12144dd 1615 else
268f7033 1616 {
f12144dd
RS
1617 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1618 gcc_assert (!defattrs->expr);
754c3d5d 1619 gcc_assert (!defattrs->offset_known_p);
f12144dd 1620
268f7033 1621 /* Respect mode size. */
754c3d5d 1622 attrs.size_known_p = defattrs->size_known_p;
f12144dd 1623 attrs.size = defattrs->size;
268f7033
UW
1624 /* ??? Is this really necessary? We probably should always get
1625 the size from the type below. */
1626
1627 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1628 if T is an object, always compute the object alignment below. */
f12144dd
RS
1629 if (TYPE_P (t))
1630 attrs.align = defattrs->align;
1631 else
1632 attrs.align = BITS_PER_UNIT;
268f7033
UW
1633 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1634 e.g. if the type carries an alignment attribute. Should we be
1635 able to simply always use TYPE_ALIGN? */
1636 }
1637
c3d32120
RK
1638 /* We can set the alignment from the type if we are making an object,
 1639 if this is an INDIRECT_REF, or if TYPE_ALIGN_OK is set. */
a80903ff 1640 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
f12144dd 1641 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
a80903ff 1642
70f34814
RG
1643 else if (TREE_CODE (t) == MEM_REF)
1644 {
a80903ff 1645 tree op0 = TREE_OPERAND (t, 0);
3e32c761
RG
1646 if (TREE_CODE (op0) == ADDR_EXPR
1647 && (DECL_P (TREE_OPERAND (op0, 0))
1648 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
70f34814 1649 {
3e32c761 1650 if (DECL_P (TREE_OPERAND (op0, 0)))
f12144dd 1651 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
3e32c761
RG
1652 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1653 {
f12144dd 1654 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
70f34814 1655#ifdef CONSTANT_ALIGNMENT
f12144dd
RS
1656 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1657 attrs.align);
70f34814 1658#endif
3e32c761
RG
1659 }
1660 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1661 {
1662 unsigned HOST_WIDE_INT ioff
1663 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1664 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd 1665 attrs.align = MIN (aoff, attrs.align);
3e32c761 1666 }
70f34814
RG
1667 }
1668 else
5951297a
EB
 1669 /* ??? This isn't fully correct; we can't set the alignment from the
1670 type in all cases. */
f12144dd 1671 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
70f34814 1672 }
a80903ff 1673
9407f6bc
RG
1674 else if (TREE_CODE (t) == TARGET_MEM_REF)
 1675 /* ??? This isn't fully correct; we can't set the alignment from the
1676 type in all cases. */
f12144dd 1677 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
9407f6bc 1678
738cc472
RK
1679 /* If the size is known, we can set that. */
1680 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
754c3d5d
RS
1681 {
1682 attrs.size_known_p = true;
1683 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1684 }
738cc472 1685
80965c18
RK
1686 /* If T is not a type, we may be able to deduce some more information about
1687 the expression. */
1688 if (! TYPE_P (t))
8ac61af7 1689 {
8476af98 1690 tree base;
df96b059 1691 bool align_computed = false;
389fdba0 1692
8ac61af7
RK
1693 if (TREE_THIS_VOLATILE (t))
1694 MEM_VOLATILE_P (ref) = 1;
173b24b9 1695
c56e3582
RK
1696 /* Now remove any conversions: they don't change what the underlying
1697 object is. Likewise for SAVE_EXPR. */
1043771b 1698 while (CONVERT_EXPR_P (t)
c56e3582
RK
1699 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1700 || TREE_CODE (t) == SAVE_EXPR)
8ac61af7
RK
1701 t = TREE_OPERAND (t, 0);
1702
4994da65
RG
1703 /* Note whether this expression can trap. */
1704 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1705
1706 base = get_base_address (t);
f18a7b25
MJ
1707 if (base)
1708 {
1709 if (DECL_P (base)
1710 && TREE_READONLY (base)
1711 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1712 && !TREE_THIS_VOLATILE (base))
1713 MEM_READONLY_P (ref) = 1;
1714
1715 /* Mark static const strings readonly as well. */
1716 if (TREE_CODE (base) == STRING_CST
1717 && TREE_READONLY (base)
1718 && TREE_STATIC (base))
1719 MEM_READONLY_P (ref) = 1;
1720
1721 if (TREE_CODE (base) == MEM_REF
1722 || TREE_CODE (base) == TARGET_MEM_REF)
1723 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1724 0))));
1725 else
1726 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1727 }
1728 else
1729 as = TYPE_ADDR_SPACE (type);
ba30e50d 1730
2039d7aa
RH
 1731 /* If this expression uses its parent's alias set, mark it such
1732 that we won't change it. */
1733 if (component_uses_parent_alias_set (t))
10b76d73
RK
1734 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1735
8ac61af7
RK
1736 /* If this is a decl, set the attributes of the MEM from it. */
1737 if (DECL_P (t))
1738 {
f12144dd 1739 attrs.expr = t;
754c3d5d
RS
1740 attrs.offset_known_p = true;
1741 attrs.offset = 0;
6f1087be 1742 apply_bitpos = bitpos;
754c3d5d
RS
1743 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1744 {
1745 attrs.size_known_p = true;
1746 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1747 }
1748 else
1749 attrs.size_known_p = false;
f12144dd 1750 attrs.align = DECL_ALIGN (t);
df96b059 1751 align_computed = true;
8ac61af7
RK
1752 }
1753
40c0668b 1754 /* If this is a constant, we know the alignment. */
6615c446 1755 else if (CONSTANT_CLASS_P (t))
9ddfb1a7 1756 {
f12144dd 1757 attrs.align = TYPE_ALIGN (type);
9ddfb1a7 1758#ifdef CONSTANT_ALIGNMENT
f12144dd 1759 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
9ddfb1a7 1760#endif
df96b059 1761 align_computed = true;
9ddfb1a7 1762 }
998d7deb
RH
1763
1764 /* If this is a field reference and not a bit-field, record it. */
fa10beec 1765 /* ??? There is some information that can be gleaned from bit-fields,
998d7deb
RH
1766 such as the word offset in the structure that might be modified.
1767 But skip it for now. */
1768 else if (TREE_CODE (t) == COMPONENT_REF
1769 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1770 {
f12144dd 1771 attrs.expr = t;
754c3d5d
RS
1772 attrs.offset_known_p = true;
1773 attrs.offset = 0;
6f1087be 1774 apply_bitpos = bitpos;
998d7deb
RH
1775 /* ??? Any reason the field size would be different than
1776 the size we got from the type? */
1777 }
1778
1779 /* If this is an array reference, look for an outer field reference. */
1780 else if (TREE_CODE (t) == ARRAY_REF)
1781 {
1782 tree off_tree = size_zero_node;
1b1838b6
JW
1783 /* We can't modify t, because we use it at the end of the
1784 function. */
1785 tree t2 = t;
998d7deb
RH
1786
1787 do
1788 {
1b1838b6 1789 tree index = TREE_OPERAND (t2, 1);
44de5aeb
RK
1790 tree low_bound = array_ref_low_bound (t2);
1791 tree unit_size = array_ref_element_size (t2);
2567406a
JH
1792
1793 /* We assume all arrays have sizes that are a multiple of a byte.
1794 First subtract the lower bound, if any, in the type of the
44de5aeb
RK
1795 index, then convert to sizetype and multiply by the size of
1796 the array element. */
1797 if (! integer_zerop (low_bound))
4845b383
KH
1798 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1799 index, low_bound);
2567406a 1800
44de5aeb 1801 off_tree = size_binop (PLUS_EXPR,
b6f65e3c
RS
1802 size_binop (MULT_EXPR,
1803 fold_convert (sizetype,
1804 index),
44de5aeb
RK
1805 unit_size),
1806 off_tree);
1b1838b6 1807 t2 = TREE_OPERAND (t2, 0);
998d7deb 1808 }
1b1838b6 1809 while (TREE_CODE (t2) == ARRAY_REF);
998d7deb 1810
1b1838b6 1811 if (DECL_P (t2))
c67a1cf6 1812 {
f12144dd 1813 attrs.expr = t2;
754c3d5d 1814 attrs.offset_known_p = false;
c67a1cf6 1815 if (host_integerp (off_tree, 1))
40cb04f1
RH
1816 {
1817 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1818 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
f12144dd
RS
1819 attrs.align = DECL_ALIGN (t2);
1820 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1821 attrs.align = aoff;
df96b059 1822 align_computed = true;
754c3d5d
RS
1823 attrs.offset_known_p = true;
1824 attrs.offset = ioff;
6f1087be 1825 apply_bitpos = bitpos;
40cb04f1 1826 }
c67a1cf6 1827 }
1b1838b6 1828 else if (TREE_CODE (t2) == COMPONENT_REF)
998d7deb 1829 {
f12144dd 1830 attrs.expr = t2;
754c3d5d 1831 attrs.offset_known_p = false;
998d7deb 1832 if (host_integerp (off_tree, 1))
6f1087be 1833 {
754c3d5d
RS
1834 attrs.offset_known_p = true;
1835 attrs.offset = tree_low_cst (off_tree, 1);
6f1087be
RH
1836 apply_bitpos = bitpos;
1837 }
998d7deb
RH
1838 /* ??? Any reason the field size would be different than
1839 the size we got from the type? */
1840 }
56c47f22 1841
56c47f22 1842 /* If this is an indirect reference, record it. */
be1ac4ec 1843 else if (TREE_CODE (t) == MEM_REF)
56c47f22 1844 {
f12144dd 1845 attrs.expr = t;
754c3d5d
RS
1846 attrs.offset_known_p = true;
1847 attrs.offset = 0;
56c47f22
RG
1848 apply_bitpos = bitpos;
1849 }
c67a1cf6
RH
1850 }
1851
56c47f22 1852 /* If this is an indirect reference, record it. */
70f34814 1853 else if (TREE_CODE (t) == MEM_REF
be1ac4ec 1854 || TREE_CODE (t) == TARGET_MEM_REF)
56c47f22 1855 {
f12144dd 1856 attrs.expr = t;
754c3d5d
RS
1857 attrs.offset_known_p = true;
1858 attrs.offset = 0;
56c47f22
RG
1859 apply_bitpos = bitpos;
1860 }
1861
0eb77834 1862 if (!align_computed)
df96b059 1863 {
0eb77834 1864 unsigned int obj_align = get_object_alignment (t);
f12144dd 1865 attrs.align = MAX (attrs.align, obj_align);
df96b059 1866 }
8ac61af7 1867 }
f18a7b25
MJ
1868 else
1869 as = TYPE_ADDR_SPACE (type);
8ac61af7 1870
15c812e3 1871 /* If we modified OFFSET based on T, then subtract the outstanding
8c317c5f
RH
1872 bit position offset. Similarly, increase the size of the accessed
1873 object to contain the negative offset. */
6f1087be 1874 if (apply_bitpos)
8c317c5f 1875 {
754c3d5d
RS
1876 gcc_assert (attrs.offset_known_p);
1877 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1878 if (attrs.size_known_p)
1879 attrs.size += apply_bitpos / BITS_PER_UNIT;
8c317c5f 1880 }
6f1087be 1881
8ac61af7 1882 /* Now set the attributes we computed above. */
f18a7b25 1883 attrs.addrspace = as;
f12144dd 1884 set_mem_attrs (ref, &attrs);
173b24b9
RK
1885}
1886
6f1087be 1887void
502b8322 1888set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1889{
1890 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1891}
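
/* Illustrative sketch, not part of the original file and kept out of the
   build: how a caller typically attaches tree-level information to a
   freshly created MEM.  The decl and address are assumed to come from
   elsewhere (e.g. the expander).  */
#if 0
static rtx
example_mem_for_decl (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
  /* objectp = 1: the MEM refers to the whole object DECL.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif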
1892
173b24b9
RK
1893/* Set the alias set of MEM to SET. */
1894
1895void
4862826d 1896set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1897{
f12144dd
RS
1898 struct mem_attrs attrs;
1899
173b24b9 1900 /* If the new and old alias sets don't conflict, something is wrong. */
77a74ed7 1901 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
f12144dd
RS
1902 attrs = *get_mem_attrs (mem);
1903 attrs.alias = set;
1904 set_mem_attrs (mem, &attrs);
09e881c9
BE
1905}
1906
1907/* Set the address space of MEM to ADDRSPACE (target-defined). */
1908
1909void
1910set_mem_addr_space (rtx mem, addr_space_t addrspace)
1911{
f12144dd
RS
1912 struct mem_attrs attrs;
1913
1914 attrs = *get_mem_attrs (mem);
1915 attrs.addrspace = addrspace;
1916 set_mem_attrs (mem, &attrs);
173b24b9 1917}
738cc472 1918
d022d93e 1919/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1920
1921void
502b8322 1922set_mem_align (rtx mem, unsigned int align)
738cc472 1923{
f12144dd
RS
1924 struct mem_attrs attrs;
1925
1926 attrs = *get_mem_attrs (mem);
1927 attrs.align = align;
1928 set_mem_attrs (mem, &attrs);
738cc472 1929}
1285011e 1930
998d7deb 1931/* Set the expr for MEM to EXPR. */
1285011e
RK
1932
1933void
502b8322 1934set_mem_expr (rtx mem, tree expr)
1285011e 1935{
f12144dd
RS
1936 struct mem_attrs attrs;
1937
1938 attrs = *get_mem_attrs (mem);
1939 attrs.expr = expr;
1940 set_mem_attrs (mem, &attrs);
1285011e 1941}
998d7deb
RH
1942
1943/* Set the offset of MEM to OFFSET. */
1944
1945void
527210c4 1946set_mem_offset (rtx mem, HOST_WIDE_INT offset)
998d7deb 1947{
f12144dd
RS
1948 struct mem_attrs attrs;
1949
1950 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1951 attrs.offset_known_p = true;
1952 attrs.offset = offset;
527210c4
RS
1953 set_mem_attrs (mem, &attrs);
1954}
1955
1956/* Clear the offset of MEM. */
1957
1958void
1959clear_mem_offset (rtx mem)
1960{
1961 struct mem_attrs attrs;
1962
1963 attrs = *get_mem_attrs (mem);
754c3d5d 1964 attrs.offset_known_p = false;
f12144dd 1965 set_mem_attrs (mem, &attrs);
35aff10b
AM
1966}
1967
1968/* Set the size of MEM to SIZE. */
1969
1970void
f5541398 1971set_mem_size (rtx mem, HOST_WIDE_INT size)
35aff10b 1972{
f12144dd
RS
1973 struct mem_attrs attrs;
1974
1975 attrs = *get_mem_attrs (mem);
754c3d5d
RS
1976 attrs.size_known_p = true;
1977 attrs.size = size;
f5541398
RS
1978 set_mem_attrs (mem, &attrs);
1979}
1980
1981/* Clear the size of MEM. */
1982
1983void
1984clear_mem_size (rtx mem)
1985{
1986 struct mem_attrs attrs;
1987
1988 attrs = *get_mem_attrs (mem);
754c3d5d 1989 attrs.size_known_p = false;
f12144dd 1990 set_mem_attrs (mem, &attrs);
998d7deb 1991}
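
/* Illustrative sketch, not part of the original file and kept out of the
   build: the setters above all follow the same copy/modify/intern pattern
   on the MEM's attribute block, so a caller with better information just
   overrides the relevant fields.  */
#if 0
static void
example_record_access (rtx mem, HOST_WIDE_INT nbytes, unsigned int align_bits)
{
  set_mem_size (mem, nbytes);		/* size of the access, in bytes */
  set_mem_align (mem, align_bits);	/* known alignment, in bits */
}
#endif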
173b24b9 1992\f
738cc472
RK
1993/* Return a memory reference like MEMREF, but with its mode changed to MODE
1994 and its address changed to ADDR. (VOIDmode means don't change the mode.
1995 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1996 returned memory location is required to be valid. The memory
1997 attributes are not changed. */
23b2ce53 1998
738cc472 1999static rtx
502b8322 2000change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53 2001{
09e881c9 2002 addr_space_t as;
60564289 2003 rtx new_rtx;
23b2ce53 2004
5b0264cb 2005 gcc_assert (MEM_P (memref));
09e881c9 2006 as = MEM_ADDR_SPACE (memref);
23b2ce53
RS
2007 if (mode == VOIDmode)
2008 mode = GET_MODE (memref);
2009 if (addr == 0)
2010 addr = XEXP (memref, 0);
a74ff877 2011 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
09e881c9 2012 && (!validate || memory_address_addr_space_p (mode, addr, as)))
a74ff877 2013 return memref;
23b2ce53 2014
f1ec5147 2015 if (validate)
23b2ce53 2016 {
f1ec5147 2017 if (reload_in_progress || reload_completed)
09e881c9 2018 gcc_assert (memory_address_addr_space_p (mode, addr, as));
f1ec5147 2019 else
09e881c9 2020 addr = memory_address_addr_space (mode, addr, as);
23b2ce53 2021 }
750c9258 2022
9b04c6a8
RK
2023 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2024 return memref;
2025
60564289
KG
2026 new_rtx = gen_rtx_MEM (mode, addr);
2027 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2028 return new_rtx;
23b2ce53 2029}
792760b9 2030
738cc472
RK
2031/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2032 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
2033
2034rtx
502b8322 2035change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 2036{
f12144dd 2037 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
60564289 2038 enum machine_mode mmode = GET_MODE (new_rtx);
f12144dd 2039 struct mem_attrs attrs, *defattrs;
4e44c1ef 2040
f12144dd
RS
2041 attrs = *get_mem_attrs (memref);
2042 defattrs = mode_mem_attrs[(int) mmode];
754c3d5d
RS
2043 attrs.expr = NULL_TREE;
2044 attrs.offset_known_p = false;
2045 attrs.size_known_p = defattrs->size_known_p;
f12144dd
RS
2046 attrs.size = defattrs->size;
2047 attrs.align = defattrs->align;
c2f7bcc3 2048
fdb1c7b3 2049 /* If there are no changes, just return the original memory reference. */
60564289 2050 if (new_rtx == memref)
4e44c1ef 2051 {
f12144dd 2052 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
60564289 2053 return new_rtx;
4e44c1ef 2054
60564289
KG
2055 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2056 MEM_COPY_ATTRIBUTES (new_rtx, memref);
4e44c1ef 2057 }
fdb1c7b3 2058
f12144dd 2059 set_mem_attrs (new_rtx, &attrs);
60564289 2060 return new_rtx;
f4ef873c 2061}
792760b9 2062
738cc472
RK
2063/* Return a memory reference like MEMREF, but with its mode changed
2064 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2065 nonzero, the memory address is forced to be valid.
2066 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
 2067 and the caller is responsible for adjusting the MEMREF base register. */
f1ec5147
RK
2068
2069rtx
502b8322
AJ
2070adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2071 int validate, int adjust)
f1ec5147 2072{
823e3574 2073 rtx addr = XEXP (memref, 0);
60564289 2074 rtx new_rtx;
f12144dd 2075 enum machine_mode address_mode;
a6fe9ed4 2076 int pbits;
f12144dd
RS
2077 struct mem_attrs attrs, *defattrs;
2078 unsigned HOST_WIDE_INT max_align;
2079
2080 attrs = *get_mem_attrs (memref);
823e3574 2081
fdb1c7b3
JH
2082 /* If there are no changes, just return the original memory reference. */
2083 if (mode == GET_MODE (memref) && !offset
f12144dd
RS
2084 && (!validate || memory_address_addr_space_p (mode, addr,
2085 attrs.addrspace)))
fdb1c7b3
JH
2086 return memref;
2087
d14419e4 2088 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 2089 This may happen even if offset is nonzero -- consider
d14419e4
RH
2090 (plus (plus reg reg) const_int) -- so do this always. */
2091 addr = copy_rtx (addr);
2092
a6fe9ed4
JM
2093 /* Convert a possibly large offset to a signed value within the
2094 range of the target address space. */
372d6395 2095 address_mode = get_address_mode (memref);
d4ebfa65 2096 pbits = GET_MODE_BITSIZE (address_mode);
a6fe9ed4
JM
2097 if (HOST_BITS_PER_WIDE_INT > pbits)
2098 {
2099 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2100 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2101 >> shift);
2102 }
2103
4a78c787
RH
2104 if (adjust)
2105 {
2106 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2107 object, we can merge it into the LO_SUM. */
2108 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2109 && offset >= 0
2110 && (unsigned HOST_WIDE_INT) offset
2111 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
d4ebfa65 2112 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
0a81f074
RS
2113 plus_constant (address_mode,
2114 XEXP (addr, 1), offset));
4a78c787 2115 else
0a81f074 2116 addr = plus_constant (address_mode, addr, offset);
4a78c787 2117 }
823e3574 2118
60564289 2119 new_rtx = change_address_1 (memref, mode, addr, validate);
738cc472 2120
09efeca1
PB
2121 /* If the address is a REG, change_address_1 rightfully returns memref,
2122 but this would destroy memref's MEM_ATTRS. */
2123 if (new_rtx == memref && offset != 0)
2124 new_rtx = copy_rtx (new_rtx);
2125
738cc472
RK
2126 /* Compute the new values of the memory attributes due to this adjustment.
2127 We add the offsets and update the alignment. */
754c3d5d
RS
2128 if (attrs.offset_known_p)
2129 attrs.offset += offset;
738cc472 2130
03bf2c23
RK
2131 /* Compute the new alignment by taking the MIN of the alignment and the
2132 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 2133 is zero. */
2134 if (offset != 0)
f12144dd
RS
2135 {
2136 max_align = (offset & -offset) * BITS_PER_UNIT;
2137 attrs.align = MIN (attrs.align, max_align);
2138 }
738cc472 2139
10b76d73 2140 /* We can compute the size in a number of ways. */
f12144dd 2141 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
754c3d5d
RS
2142 if (defattrs->size_known_p)
2143 {
2144 attrs.size_known_p = true;
2145 attrs.size = defattrs->size;
2146 }
2147 else if (attrs.size_known_p)
2148 attrs.size -= offset;
10b76d73 2149
f12144dd 2150 set_mem_attrs (new_rtx, &attrs);
738cc472
RK
2151
2152 /* At some point, we should validate that this offset is within the object,
2153 if all the appropriate values are known. */
60564289 2154 return new_rtx;
f1ec5147
RK
2155}
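
/* Illustrative sketch, not part of the original file and kept out of the
   build: extracting the second SImode word of a DImode memory reference.
   rtl.h is assumed to provide the adjust_address convenience macro; the
   call below uses the worker directly so the sketch depends only on this
   file.  */
#if 0
static rtx
example_second_word (rtx dimode_mem)
{
  /* VALIDATE = 1 forces a valid address, ADJUST = 1 actually adjusts it.  */
  return adjust_address_1 (dimode_mem, SImode, GET_MODE_SIZE (SImode), 1, 1);
}
#endif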
2156
630036c6
JJ
2157/* Return a memory reference like MEMREF, but with its mode changed
2158 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2159 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2160 nonzero, the memory address is forced to be valid. */
2161
2162rtx
502b8322
AJ
2163adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2164 HOST_WIDE_INT offset, int validate)
630036c6
JJ
2165{
2166 memref = change_address_1 (memref, VOIDmode, addr, validate);
2167 return adjust_address_1 (memref, mode, offset, validate, 0);
2168}
2169
8ac61af7
RK
2170/* Return a memory reference like MEMREF, but whose address is changed by
2171 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2172 known to be in OFFSET (possibly 1). */
0d4903b8
RK
2173
2174rtx
502b8322 2175offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 2176{
60564289 2177 rtx new_rtx, addr = XEXP (memref, 0);
f12144dd 2178 enum machine_mode address_mode;
754c3d5d 2179 struct mem_attrs attrs, *defattrs;
e3c8ea67 2180
f12144dd 2181 attrs = *get_mem_attrs (memref);
372d6395 2182 address_mode = get_address_mode (memref);
d4ebfa65 2183 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67 2184
68252e27 2185 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 2186 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
2187
2188 However, if we did go and rearrange things, we can wind up not
2189 being able to recognize the magic around pic_offset_table_rtx.
2190 This stuff is fragile, and is yet another example of why it is
2191 bad to expose PIC machinery too early. */
f12144dd
RS
2192 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2193 attrs.addrspace)
e3c8ea67
RH
2194 && GET_CODE (addr) == PLUS
2195 && XEXP (addr, 0) == pic_offset_table_rtx)
2196 {
2197 addr = force_reg (GET_MODE (addr), addr);
d4ebfa65 2198 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
e3c8ea67
RH
2199 }
2200
60564289
KG
2201 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2202 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
0d4903b8 2203
fdb1c7b3 2204 /* If there are no changes, just return the original memory reference. */
60564289
KG
2205 if (new_rtx == memref)
2206 return new_rtx;
fdb1c7b3 2207
0d4903b8
RK
2208 /* Update the alignment to reflect the offset. Reset the offset, which
2209 we don't know. */
754c3d5d
RS
2210 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2211 attrs.offset_known_p = false;
2212 attrs.size_known_p = defattrs->size_known_p;
2213 attrs.size = defattrs->size;
f12144dd
RS
2214 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2215 set_mem_attrs (new_rtx, &attrs);
60564289 2216 return new_rtx;
0d4903b8 2217}
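
/* Illustrative sketch, not part of the original file and kept out of the
   build: addressing element IDX of an array of 4-byte elements.  POW2 = 4
   tells offset_address that the offset rtx is a multiple of 4, so the
   alignment of the result is not reduced below that.  */
#if 0
static rtx
example_array_element (rtx array_mem, rtx idx)
{
  rtx byte_off = gen_rtx_MULT (Pmode, idx, GEN_INT (4));
  return offset_address (array_mem, byte_off, 4);
}
#endif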
68252e27 2218
792760b9
RK
2219/* Return a memory reference like MEMREF, but with its address changed to
2220 ADDR. The caller is asserting that the actual piece of memory pointed
2221 to is the same, just the form of the address is being changed, such as
2222 by putting something into a register. */
2223
2224rtx
502b8322 2225replace_equiv_address (rtx memref, rtx addr)
792760b9 2226{
738cc472
RK
2227 /* change_address_1 copies the memory attribute structure without change
2228 and that's exactly what we want here. */
40c0668b 2229 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 2230 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 2231}
738cc472 2232
f1ec5147
RK
2233/* Likewise, but the reference is not required to be valid. */
2234
2235rtx
502b8322 2236replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 2237{
f1ec5147
RK
2238 return change_address_1 (memref, VOIDmode, addr, 0);
2239}
e7dfe4bb
RH
2240
2241/* Return a memory reference like MEMREF, but with its mode widened to
2242 MODE and offset by OFFSET. This would be used by targets that e.g.
2243 cannot issue QImode memory operations and have to use SImode memory
2244 operations plus masking logic. */
2245
2246rtx
502b8322 2247widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb 2248{
60564289 2249 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
f12144dd 2250 struct mem_attrs attrs;
e7dfe4bb
RH
2251 unsigned int size = GET_MODE_SIZE (mode);
2252
fdb1c7b3 2253 /* If there are no changes, just return the original memory reference. */
60564289
KG
2254 if (new_rtx == memref)
2255 return new_rtx;
fdb1c7b3 2256
f12144dd
RS
2257 attrs = *get_mem_attrs (new_rtx);
2258
e7dfe4bb
RH
2259 /* If we don't know what offset we were at within the expression, then
2260 we can't know if we've overstepped the bounds. */
754c3d5d 2261 if (! attrs.offset_known_p)
f12144dd 2262 attrs.expr = NULL_TREE;
e7dfe4bb 2263
f12144dd 2264 while (attrs.expr)
e7dfe4bb 2265 {
f12144dd 2266 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
e7dfe4bb 2267 {
f12144dd
RS
2268 tree field = TREE_OPERAND (attrs.expr, 1);
2269 tree offset = component_ref_field_offset (attrs.expr);
e7dfe4bb
RH
2270
2271 if (! DECL_SIZE_UNIT (field))
2272 {
f12144dd 2273 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2274 break;
2275 }
2276
2277 /* Is the field at least as large as the access? If so, ok,
2278 otherwise strip back to the containing structure. */
03667700
RK
2279 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2280 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
754c3d5d 2281 && attrs.offset >= 0)
e7dfe4bb
RH
2282 break;
2283
44de5aeb 2284 if (! host_integerp (offset, 1))
e7dfe4bb 2285 {
f12144dd 2286 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2287 break;
2288 }
2289
f12144dd 2290 attrs.expr = TREE_OPERAND (attrs.expr, 0);
754c3d5d
RS
2291 attrs.offset += tree_low_cst (offset, 1);
2292 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2293 / BITS_PER_UNIT);
e7dfe4bb
RH
2294 }
2295 /* Similarly for the decl. */
f12144dd
RS
2296 else if (DECL_P (attrs.expr)
2297 && DECL_SIZE_UNIT (attrs.expr)
2298 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2299 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
754c3d5d 2300 && (! attrs.offset_known_p || attrs.offset >= 0))
e7dfe4bb
RH
2301 break;
2302 else
2303 {
2304 /* The widened memory access overflows the expression, which means
2305 that it could alias another expression. Zap it. */
f12144dd 2306 attrs.expr = NULL_TREE;
e7dfe4bb
RH
2307 break;
2308 }
2309 }
2310
f12144dd 2311 if (! attrs.expr)
754c3d5d 2312 attrs.offset_known_p = false;
e7dfe4bb
RH
2313
2314 /* The widened memory may alias other stuff, so zap the alias set. */
2315 /* ??? Maybe use get_alias_set on any remaining expression. */
f12144dd 2316 attrs.alias = 0;
754c3d5d
RS
2317 attrs.size_known_p = true;
2318 attrs.size = size;
f12144dd 2319 set_mem_attrs (new_rtx, &attrs);
60564289 2320 return new_rtx;
e7dfe4bb 2321}
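
/* Illustrative sketch, not part of the original file and kept out of the
   build: a target without byte loads could access a QImode MEM through a
   wider SImode reference and mask out the byte afterwards.  */
#if 0
static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}
#endif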
23b2ce53 2322\f
f6129d66
RH
2323/* A fake decl that is used as the MEM_EXPR of spill slots. */
2324static GTY(()) tree spill_slot_decl;
2325
3d7e23f6
RH
2326tree
2327get_spill_slot_decl (bool force_build_p)
f6129d66
RH
2328{
2329 tree d = spill_slot_decl;
2330 rtx rd;
f12144dd 2331 struct mem_attrs attrs;
f6129d66 2332
3d7e23f6 2333 if (d || !force_build_p)
f6129d66
RH
2334 return d;
2335
c2255bc4
AH
2336 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2337 VAR_DECL, get_identifier ("%sfp"), void_type_node);
f6129d66
RH
2338 DECL_ARTIFICIAL (d) = 1;
2339 DECL_IGNORED_P (d) = 1;
2340 TREE_USED (d) = 1;
f6129d66
RH
2341 spill_slot_decl = d;
2342
2343 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2344 MEM_NOTRAP_P (rd) = 1;
f12144dd
RS
2345 attrs = *mode_mem_attrs[(int) BLKmode];
2346 attrs.alias = new_alias_set ();
2347 attrs.expr = d;
2348 set_mem_attrs (rd, &attrs);
f6129d66
RH
2349 SET_DECL_RTL (d, rd);
2350
2351 return d;
2352}
2353
2354/* Given MEM, a result from assign_stack_local, fill in the memory
2355 attributes as appropriate for a register allocator spill slot.
2356 These slots are not aliasable by other memory. We arrange for
2357 them all to use a single MEM_EXPR, so that the aliasing code can
2358 work properly in the case of shared spill slots. */
2359
2360void
2361set_mem_attrs_for_spill (rtx mem)
2362{
f12144dd
RS
2363 struct mem_attrs attrs;
2364 rtx addr;
f6129d66 2365
f12144dd
RS
2366 attrs = *get_mem_attrs (mem);
2367 attrs.expr = get_spill_slot_decl (true);
2368 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2369 attrs.addrspace = ADDR_SPACE_GENERIC;
f6129d66
RH
2370
2371 /* We expect the incoming memory to be of the form:
2372 (mem:MODE (plus (reg sfp) (const_int offset)))
2373 with perhaps the plus missing for offset = 0. */
2374 addr = XEXP (mem, 0);
754c3d5d
RS
2375 attrs.offset_known_p = true;
2376 attrs.offset = 0;
f6129d66 2377 if (GET_CODE (addr) == PLUS
481683e1 2378 && CONST_INT_P (XEXP (addr, 1)))
754c3d5d 2379 attrs.offset = INTVAL (XEXP (addr, 1));
f6129d66 2380
f12144dd 2381 set_mem_attrs (mem, &attrs);
f6129d66
RH
2382 MEM_NOTRAP_P (mem) = 1;
2383}
2384\f
23b2ce53
RS
2385/* Return a newly created CODE_LABEL rtx with a unique label number. */
2386
2387rtx
502b8322 2388gen_label_rtx (void)
23b2ce53 2389{
0dc36574 2390 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2391 NULL, label_num++, NULL);
23b2ce53
RS
2392}
2393\f
2394/* For procedure integration. */
2395
23b2ce53 2396/* Install new pointers to the first and last insns in the chain.
86fe05e0 2397 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2398 Used for an inline-procedure after copying the insn chain. */
2399
2400void
502b8322 2401set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2402{
86fe05e0
RK
2403 rtx insn;
2404
5936d944
JH
2405 set_first_insn (first);
2406 set_last_insn (last);
86fe05e0
RK
2407 cur_insn_uid = 0;
2408
b5b8b0ac
AO
2409 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2410 {
2411 int debug_count = 0;
2412
2413 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2414 cur_debug_insn_uid = 0;
2415
2416 for (insn = first; insn; insn = NEXT_INSN (insn))
2417 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2418 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2419 else
2420 {
2421 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2422 if (DEBUG_INSN_P (insn))
2423 debug_count++;
2424 }
2425
2426 if (debug_count)
2427 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2428 else
2429 cur_debug_insn_uid++;
2430 }
2431 else
2432 for (insn = first; insn; insn = NEXT_INSN (insn))
2433 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
86fe05e0
RK
2434
2435 cur_insn_uid++;
23b2ce53 2436}
23b2ce53 2437\f
750c9258 2438/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2439 structure. This routine should only be called once. */
23b2ce53 2440
fd743bc1 2441static void
b4aaa77b 2442unshare_all_rtl_1 (rtx insn)
23b2ce53 2443{
d1b81779 2444 /* Unshare just about everything else. */
2c07f13b 2445 unshare_all_rtl_in_chain (insn);
750c9258 2446
23b2ce53
RS
2447 /* Make sure the addresses of stack slots found outside the insn chain
2448 (such as, in DECL_RTL of a variable) are not shared
2449 with the insn chain.
2450
2451 This special care is necessary when the stack slot MEM does not
2452 actually appear in the insn chain. If it does appear, its address
2453 is unshared from all else at that point. */
242b0ce6 2454 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2455}
2456
750c9258 2457/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2458 structure, again. This is a fairly expensive thing to do so it
2459 should be done sparingly. */
2460
2461void
502b8322 2462unshare_all_rtl_again (rtx insn)
d1b81779
GK
2463{
2464 rtx p;
624c87aa
RE
2465 tree decl;
2466
d1b81779 2467 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2468 if (INSN_P (p))
d1b81779
GK
2469 {
2470 reset_used_flags (PATTERN (p));
2471 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2472 if (CALL_P (p))
2473 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
d1b81779 2474 }
624c87aa 2475
2d4aecb3 2476 /* Make sure that virtual stack slots are not shared. */
5eb2a9f2 2477 set_used_decls (DECL_INITIAL (cfun->decl));
2d4aecb3 2478
624c87aa 2479 /* Make sure that virtual parameters are not shared. */
910ad8de 2480 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
5eb2a9f2 2481 set_used_flags (DECL_RTL (decl));
624c87aa
RE
2482
2483 reset_used_flags (stack_slot_list);
2484
b4aaa77b 2485 unshare_all_rtl_1 (insn);
fd743bc1
PB
2486}
2487
c2924966 2488unsigned int
fd743bc1
PB
2489unshare_all_rtl (void)
2490{
b4aaa77b 2491 unshare_all_rtl_1 (get_insns ());
c2924966 2492 return 0;
d1b81779
GK
2493}
2494
ef330312 2495
2c07f13b
JH
 2496/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2497 Recursively does the same for subexpressions. */
2498
2499static void
2500verify_rtx_sharing (rtx orig, rtx insn)
2501{
2502 rtx x = orig;
2503 int i;
2504 enum rtx_code code;
2505 const char *format_ptr;
2506
2507 if (x == 0)
2508 return;
2509
2510 code = GET_CODE (x);
2511
2512 /* These types may be freely shared. */
2513
2514 switch (code)
2515 {
2516 case REG:
0ca5af51
AO
2517 case DEBUG_EXPR:
2518 case VALUE:
2c07f13b
JH
2519 case CONST_INT:
2520 case CONST_DOUBLE:
091a3ac7 2521 case CONST_FIXED:
2c07f13b
JH
2522 case CONST_VECTOR:
2523 case SYMBOL_REF:
2524 case LABEL_REF:
2525 case CODE_LABEL:
2526 case PC:
2527 case CC0:
3810076b 2528 case RETURN:
26898771 2529 case SIMPLE_RETURN:
2c07f13b 2530 case SCRATCH:
2c07f13b 2531 return;
3e89ed8d
JH
2532 /* SCRATCH must be shared because they represent distinct values. */
2533 case CLOBBER:
2534 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2535 return;
2536 break;
2c07f13b
JH
2537
2538 case CONST:
6fb5fa3c 2539 if (shared_const_p (orig))
2c07f13b
JH
2540 return;
2541 break;
2542
2543 case MEM:
2544 /* A MEM is allowed to be shared if its address is constant. */
2545 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2546 || reload_completed || reload_in_progress)
2547 return;
2548
2549 break;
2550
2551 default:
2552 break;
2553 }
2554
2555 /* This rtx may not be shared. If it has already been seen,
2556 replace it with a copy of itself. */
1a2caa7a 2557#ifdef ENABLE_CHECKING
2c07f13b
JH
2558 if (RTX_FLAG (x, used))
2559 {
ab532386 2560 error ("invalid rtl sharing found in the insn");
2c07f13b 2561 debug_rtx (insn);
ab532386 2562 error ("shared rtx");
2c07f13b 2563 debug_rtx (x);
ab532386 2564 internal_error ("internal consistency failure");
2c07f13b 2565 }
1a2caa7a
NS
2566#endif
2567 gcc_assert (!RTX_FLAG (x, used));
b8698a0f 2568
2c07f13b
JH
2569 RTX_FLAG (x, used) = 1;
2570
6614fd40 2571 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2572
2573 format_ptr = GET_RTX_FORMAT (code);
2574
2575 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2576 {
2577 switch (*format_ptr++)
2578 {
2579 case 'e':
2580 verify_rtx_sharing (XEXP (x, i), insn);
2581 break;
2582
2583 case 'E':
2584 if (XVEC (x, i) != NULL)
2585 {
2586 int j;
2587 int len = XVECLEN (x, i);
2588
2589 for (j = 0; j < len; j++)
2590 {
1a2caa7a
NS
2591 /* We allow sharing of ASM_OPERANDS inside single
2592 instruction. */
2c07f13b 2593 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2594 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2595 == ASM_OPERANDS))
2c07f13b
JH
2596 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2597 else
2598 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2599 }
2600 }
2601 break;
2602 }
2603 }
2604 return;
2605}
2606
ba228239 2607/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2608 sharing in between the subexpressions. */
2609
24e47c76 2610DEBUG_FUNCTION void
2c07f13b
JH
2611verify_rtl_sharing (void)
2612{
2613 rtx p;
2614
a222c01a
MM
2615 timevar_push (TV_VERIFY_RTL_SHARING);
2616
2c07f13b
JH
2617 for (p = get_insns (); p; p = NEXT_INSN (p))
2618 if (INSN_P (p))
2619 {
2620 reset_used_flags (PATTERN (p));
2621 reset_used_flags (REG_NOTES (p));
776bebcd
JJ
2622 if (CALL_P (p))
2623 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2954a813
KK
2624 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2625 {
2626 int i;
2627 rtx q, sequence = PATTERN (p);
2628
2629 for (i = 0; i < XVECLEN (sequence, 0); i++)
2630 {
2631 q = XVECEXP (sequence, 0, i);
2632 gcc_assert (INSN_P (q));
2633 reset_used_flags (PATTERN (q));
2634 reset_used_flags (REG_NOTES (q));
776bebcd
JJ
2635 if (CALL_P (q))
2636 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2954a813
KK
2637 }
2638 }
2c07f13b
JH
2639 }
2640
2641 for (p = get_insns (); p; p = NEXT_INSN (p))
2642 if (INSN_P (p))
2643 {
2644 verify_rtx_sharing (PATTERN (p), p);
2645 verify_rtx_sharing (REG_NOTES (p), p);
776bebcd
JJ
2646 if (CALL_P (p))
2647 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2c07f13b 2648 }
a222c01a
MM
2649
2650 timevar_pop (TV_VERIFY_RTL_SHARING);
2c07f13b
JH
2651}
2652
d1b81779
GK
2653/* Go through all the RTL insn bodies and copy any invalid shared structure.
2654 Assumes the mark bits are cleared at entry. */
2655
2c07f13b
JH
2656void
2657unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2658{
2659 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2660 if (INSN_P (insn))
d1b81779
GK
2661 {
2662 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2663 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
776bebcd
JJ
2664 if (CALL_P (insn))
2665 CALL_INSN_FUNCTION_USAGE (insn)
2666 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
d1b81779
GK
2667 }
2668}
2669
2d4aecb3 2670/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2671 shared. We never replace the DECL_RTLs themselves with a copy,
2672 but expressions mentioned into a DECL_RTL cannot be shared with
2673 expressions in the instruction stream.
2674
2675 Note that reload may convert pseudo registers into memories in-place.
2676 Pseudo registers are always shared, but MEMs never are. Thus if we
2677 reset the used flags on MEMs in the instruction stream, we must set
2678 them again on MEMs that appear in DECL_RTLs. */
2679
2d4aecb3 2680static void
5eb2a9f2 2681set_used_decls (tree blk)
2d4aecb3
AO
2682{
2683 tree t;
2684
2685 /* Mark decls. */
910ad8de 2686 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
19e7881c 2687 if (DECL_RTL_SET_P (t))
5eb2a9f2 2688 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2689
2690 /* Now process sub-blocks. */
87caf699 2691 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2692 set_used_decls (t);
2d4aecb3
AO
2693}
2694
23b2ce53 2695/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2696 Recursively does the same for subexpressions. Uses
2697 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2698
2699rtx
502b8322 2700copy_rtx_if_shared (rtx orig)
23b2ce53 2701{
32b32b16
AP
2702 copy_rtx_if_shared_1 (&orig);
2703 return orig;
2704}
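
/* Illustrative sketch, not part of the original file and kept out of the
   build: the unsharing protocol is always "clear the used bits, then copy
   anything that is seen twice", as unshare_all_rtl_again above does for a
   whole insn chain.  */
#if 0
static void
example_unshare_pattern (rtx insn)
{
  reset_used_flags (PATTERN (insn));
  PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
}
#endif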
2705
ff954f39
AP
2706/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2707 use. Recursively does the same for subexpressions. */
2708
32b32b16
AP
2709static void
2710copy_rtx_if_shared_1 (rtx *orig1)
2711{
2712 rtx x;
b3694847
SS
2713 int i;
2714 enum rtx_code code;
32b32b16 2715 rtx *last_ptr;
b3694847 2716 const char *format_ptr;
23b2ce53 2717 int copied = 0;
32b32b16
AP
2718 int length;
2719
2720 /* Repeat is used to turn tail-recursion into iteration. */
2721repeat:
2722 x = *orig1;
23b2ce53
RS
2723
2724 if (x == 0)
32b32b16 2725 return;
23b2ce53
RS
2726
2727 code = GET_CODE (x);
2728
2729 /* These types may be freely shared. */
2730
2731 switch (code)
2732 {
2733 case REG:
0ca5af51
AO
2734 case DEBUG_EXPR:
2735 case VALUE:
23b2ce53
RS
2736 case CONST_INT:
2737 case CONST_DOUBLE:
091a3ac7 2738 case CONST_FIXED:
69ef87e2 2739 case CONST_VECTOR:
23b2ce53 2740 case SYMBOL_REF:
2c07f13b 2741 case LABEL_REF:
23b2ce53
RS
2742 case CODE_LABEL:
2743 case PC:
2744 case CC0:
276e0224 2745 case RETURN:
26898771 2746 case SIMPLE_RETURN:
23b2ce53 2747 case SCRATCH:
0f41302f 2748 /* SCRATCH must be shared because they represent distinct values. */
32b32b16 2749 return;
3e89ed8d
JH
2750 case CLOBBER:
2751 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2752 return;
2753 break;
23b2ce53 2754
b851ea09 2755 case CONST:
6fb5fa3c 2756 if (shared_const_p (x))
32b32b16 2757 return;
b851ea09
RK
2758 break;
2759
b5b8b0ac 2760 case DEBUG_INSN:
23b2ce53
RS
2761 case INSN:
2762 case JUMP_INSN:
2763 case CALL_INSN:
2764 case NOTE:
23b2ce53
RS
2765 case BARRIER:
2766 /* The chain of insns is not being copied. */
32b32b16 2767 return;
23b2ce53 2768
e9a25f70
JL
2769 default:
2770 break;
23b2ce53
RS
2771 }
2772
2773 /* This rtx may not be shared. If it has already been seen,
2774 replace it with a copy of itself. */
2775
2adc7f12 2776 if (RTX_FLAG (x, used))
23b2ce53 2777 {
aacd3885 2778 x = shallow_copy_rtx (x);
23b2ce53
RS
2779 copied = 1;
2780 }
2adc7f12 2781 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2782
2783 /* Now scan the subexpressions recursively.
2784 We can store any replaced subexpressions directly into X
2785 since we know X is not shared! Any vectors in X
2786 must be copied if X was copied. */
2787
2788 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2789 length = GET_RTX_LENGTH (code);
2790 last_ptr = NULL;
b8698a0f 2791
32b32b16 2792 for (i = 0; i < length; i++)
23b2ce53
RS
2793 {
2794 switch (*format_ptr++)
2795 {
2796 case 'e':
32b32b16
AP
2797 if (last_ptr)
2798 copy_rtx_if_shared_1 (last_ptr);
2799 last_ptr = &XEXP (x, i);
23b2ce53
RS
2800 break;
2801
2802 case 'E':
2803 if (XVEC (x, i) != NULL)
2804 {
b3694847 2805 int j;
f0722107 2806 int len = XVECLEN (x, i);
b8698a0f 2807
6614fd40
KH
2808 /* Copy the vector iff I copied the rtx and the length
2809 is nonzero. */
f0722107 2810 if (copied && len > 0)
8f985ec4 2811 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
b8698a0f 2812
5d3cc252 2813 /* Call recursively on all inside the vector. */
f0722107 2814 for (j = 0; j < len; j++)
32b32b16
AP
2815 {
2816 if (last_ptr)
2817 copy_rtx_if_shared_1 (last_ptr);
2818 last_ptr = &XVECEXP (x, i, j);
2819 }
23b2ce53
RS
2820 }
2821 break;
2822 }
2823 }
32b32b16
AP
2824 *orig1 = x;
2825 if (last_ptr)
2826 {
2827 orig1 = last_ptr;
2828 goto repeat;
2829 }
2830 return;
23b2ce53
RS
2831}
2832
76369a82 2833/* Set the USED bit in X and its non-shareable subparts to FLAG. */
23b2ce53 2834
76369a82
NF
2835static void
2836mark_used_flags (rtx x, int flag)
23b2ce53 2837{
b3694847
SS
2838 int i, j;
2839 enum rtx_code code;
2840 const char *format_ptr;
32b32b16 2841 int length;
23b2ce53 2842
32b32b16
AP
2843 /* Repeat is used to turn tail-recursion into iteration. */
2844repeat:
23b2ce53
RS
2845 if (x == 0)
2846 return;
2847
2848 code = GET_CODE (x);
2849
9faa82d8 2850 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2851 for them. */
2852
2853 switch (code)
2854 {
2855 case REG:
0ca5af51
AO
2856 case DEBUG_EXPR:
2857 case VALUE:
23b2ce53
RS
2858 case CONST_INT:
2859 case CONST_DOUBLE:
091a3ac7 2860 case CONST_FIXED:
69ef87e2 2861 case CONST_VECTOR:
23b2ce53
RS
2862 case SYMBOL_REF:
2863 case CODE_LABEL:
2864 case PC:
2865 case CC0:
276e0224 2866 case RETURN:
26898771 2867 case SIMPLE_RETURN:
23b2ce53
RS
2868 return;
2869
b5b8b0ac 2870 case DEBUG_INSN:
23b2ce53
RS
2871 case INSN:
2872 case JUMP_INSN:
2873 case CALL_INSN:
2874 case NOTE:
2875 case LABEL_REF:
2876 case BARRIER:
2877 /* The chain of insns is not being copied. */
2878 return;
750c9258 2879
e9a25f70
JL
2880 default:
2881 break;
23b2ce53
RS
2882 }
2883
76369a82 2884 RTX_FLAG (x, used) = flag;
23b2ce53
RS
2885
2886 format_ptr = GET_RTX_FORMAT (code);
32b32b16 2887 length = GET_RTX_LENGTH (code);
b8698a0f 2888
32b32b16 2889 for (i = 0; i < length; i++)
23b2ce53
RS
2890 {
2891 switch (*format_ptr++)
2892 {
2893 case 'e':
32b32b16
AP
2894 if (i == length-1)
2895 {
2896 x = XEXP (x, i);
2897 goto repeat;
2898 }
76369a82 2899 mark_used_flags (XEXP (x, i), flag);
23b2ce53
RS
2900 break;
2901
2902 case 'E':
2903 for (j = 0; j < XVECLEN (x, i); j++)
76369a82 2904 mark_used_flags (XVECEXP (x, i, j), flag);
23b2ce53
RS
2905 break;
2906 }
2907 }
2908}
2c07f13b 2909
76369a82 2910/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2c07f13b
JH
2911 to look for shared sub-parts. */
2912
2913void
76369a82 2914reset_used_flags (rtx x)
2c07f13b 2915{
76369a82
NF
2916 mark_used_flags (x, 0);
2917}
2c07f13b 2918
76369a82
NF
2919/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2920 to look for shared sub-parts. */
2c07f13b 2921
76369a82
NF
2922void
2923set_used_flags (rtx x)
2924{
2925 mark_used_flags (x, 1);
2c07f13b 2926}
23b2ce53
RS
2927\f
2928/* Copy X if necessary so that it won't be altered by changes in OTHER.
2929 Return X or the rtx for the pseudo reg the value of X was copied into.
2930 OTHER must be valid as a SET_DEST. */
2931
2932rtx
502b8322 2933make_safe_from (rtx x, rtx other)
23b2ce53
RS
2934{
2935 while (1)
2936 switch (GET_CODE (other))
2937 {
2938 case SUBREG:
2939 other = SUBREG_REG (other);
2940 break;
2941 case STRICT_LOW_PART:
2942 case SIGN_EXTEND:
2943 case ZERO_EXTEND:
2944 other = XEXP (other, 0);
2945 break;
2946 default:
2947 goto done;
2948 }
2949 done:
3c0cb5de 2950 if ((MEM_P (other)
23b2ce53 2951 && ! CONSTANT_P (x)
f8cfc6aa 2952 && !REG_P (x)
23b2ce53 2953 && GET_CODE (x) != SUBREG)
f8cfc6aa 2954 || (REG_P (other)
23b2ce53
RS
2955 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2956 || reg_mentioned_p (other, x))))
2957 {
2958 rtx temp = gen_reg_rtx (GET_MODE (x));
2959 emit_move_insn (temp, x);
2960 return temp;
2961 }
2962 return x;
2963}
2964\f
2965/* Emission of insns (adding them to the doubly-linked list). */
2966
23b2ce53
RS
2967/* Return the last insn emitted, even if it is in a sequence now pushed. */
2968
2969rtx
502b8322 2970get_last_insn_anywhere (void)
23b2ce53
RS
2971{
2972 struct sequence_stack *stack;
5936d944
JH
2973 if (get_last_insn ())
2974 return get_last_insn ();
49ad7cfa 2975 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2976 if (stack->last != 0)
2977 return stack->last;
2978 return 0;
2979}
2980
2a496e8b
JDA
2981/* Return the first nonnote insn emitted in current sequence or current
2982 function. This routine looks inside SEQUENCEs. */
2983
2984rtx
502b8322 2985get_first_nonnote_insn (void)
2a496e8b 2986{
5936d944 2987 rtx insn = get_insns ();
91373fe8
JDA
2988
2989 if (insn)
2990 {
2991 if (NOTE_P (insn))
2992 for (insn = next_insn (insn);
2993 insn && NOTE_P (insn);
2994 insn = next_insn (insn))
2995 continue;
2996 else
2997 {
2ca202e7 2998 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2999 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3000 insn = XVECEXP (PATTERN (insn), 0, 0);
3001 }
3002 }
2a496e8b
JDA
3003
3004 return insn;
3005}
3006
3007/* Return the last nonnote insn emitted in current sequence or current
3008 function. This routine looks inside SEQUENCEs. */
3009
3010rtx
502b8322 3011get_last_nonnote_insn (void)
2a496e8b 3012{
5936d944 3013 rtx insn = get_last_insn ();
91373fe8
JDA
3014
3015 if (insn)
3016 {
3017 if (NOTE_P (insn))
3018 for (insn = previous_insn (insn);
3019 insn && NOTE_P (insn);
3020 insn = previous_insn (insn))
3021 continue;
3022 else
3023 {
2ca202e7 3024 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
3025 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3026 insn = XVECEXP (PATTERN (insn), 0,
3027 XVECLEN (PATTERN (insn), 0) - 1);
3028 }
3029 }
2a496e8b
JDA
3030
3031 return insn;
3032}
3033
b5b8b0ac
AO
3034/* Return the number of actual (non-debug) insns emitted in this
3035 function. */
3036
3037int
3038get_max_insn_count (void)
3039{
3040 int n = cur_insn_uid;
3041
3042 /* The table size must be stable across -g, to avoid codegen
3043 differences due to debug insns, and not be affected by
3044 -fmin-insn-uid, to avoid excessive table size and to simplify
3045 debugging of -fcompare-debug failures. */
3046 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3047 n -= cur_debug_insn_uid;
3048 else
3049 n -= MIN_NONDEBUG_INSN_UID;
3050
3051 return n;
3052}
3053
23b2ce53
RS
3054\f
3055/* Return the next insn. If it is a SEQUENCE, return the first insn
3056 of the sequence. */
3057
3058rtx
502b8322 3059next_insn (rtx insn)
23b2ce53 3060{
75547801
KG
3061 if (insn)
3062 {
3063 insn = NEXT_INSN (insn);
3064 if (insn && NONJUMP_INSN_P (insn)
3065 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3066 insn = XVECEXP (PATTERN (insn), 0, 0);
3067 }
23b2ce53 3068
75547801 3069 return insn;
23b2ce53
RS
3070}
3071
3072/* Return the previous insn. If it is a SEQUENCE, return the last insn
3073 of the sequence. */
3074
3075rtx
502b8322 3076previous_insn (rtx insn)
23b2ce53 3077{
75547801
KG
3078 if (insn)
3079 {
3080 insn = PREV_INSN (insn);
3081 if (insn && NONJUMP_INSN_P (insn)
3082 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3083 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3084 }
23b2ce53 3085
75547801 3086 return insn;
23b2ce53
RS
3087}
3088
3089/* Return the next insn after INSN that is not a NOTE. This routine does not
3090 look inside SEQUENCEs. */
3091
3092rtx
502b8322 3093next_nonnote_insn (rtx insn)
23b2ce53 3094{
75547801
KG
3095 while (insn)
3096 {
3097 insn = NEXT_INSN (insn);
3098 if (insn == 0 || !NOTE_P (insn))
3099 break;
3100 }
23b2ce53 3101
75547801 3102 return insn;
23b2ce53
RS
3103}
3104
1e211590
DD
3105/* Return the next insn after INSN that is not a NOTE, but stop the
3106 search before we enter another basic block. This routine does not
3107 look inside SEQUENCEs. */
3108
3109rtx
3110next_nonnote_insn_bb (rtx insn)
3111{
3112 while (insn)
3113 {
3114 insn = NEXT_INSN (insn);
3115 if (insn == 0 || !NOTE_P (insn))
3116 break;
3117 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3118 return NULL_RTX;
3119 }
3120
3121 return insn;
3122}
3123
23b2ce53
RS
3124/* Return the previous insn before INSN that is not a NOTE. This routine does
3125 not look inside SEQUENCEs. */
3126
3127rtx
502b8322 3128prev_nonnote_insn (rtx insn)
23b2ce53 3129{
75547801
KG
3130 while (insn)
3131 {
3132 insn = PREV_INSN (insn);
3133 if (insn == 0 || !NOTE_P (insn))
3134 break;
3135 }
23b2ce53 3136
75547801 3137 return insn;
23b2ce53
RS
3138}
3139
896aa4ea
DD
3140/* Return the previous insn before INSN that is not a NOTE, but stop
3141 the search before we enter another basic block. This routine does
3142 not look inside SEQUENCEs. */
3143
3144rtx
3145prev_nonnote_insn_bb (rtx insn)
3146{
3147 while (insn)
3148 {
3149 insn = PREV_INSN (insn);
3150 if (insn == 0 || !NOTE_P (insn))
3151 break;
3152 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3153 return NULL_RTX;
3154 }
3155
3156 return insn;
3157}
3158
b5b8b0ac
AO
3159/* Return the next insn after INSN that is not a DEBUG_INSN. This
3160 routine does not look inside SEQUENCEs. */
3161
3162rtx
3163next_nondebug_insn (rtx insn)
3164{
3165 while (insn)
3166 {
3167 insn = NEXT_INSN (insn);
3168 if (insn == 0 || !DEBUG_INSN_P (insn))
3169 break;
3170 }
3171
3172 return insn;
3173}
3174
3175/* Return the previous insn before INSN that is not a DEBUG_INSN.
3176 This routine does not look inside SEQUENCEs. */
3177
3178rtx
3179prev_nondebug_insn (rtx insn)
3180{
3181 while (insn)
3182 {
3183 insn = PREV_INSN (insn);
3184 if (insn == 0 || !DEBUG_INSN_P (insn))
3185 break;
3186 }
3187
3188 return insn;
3189}
3190
f0fc0803
JJ
3191/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3192 This routine does not look inside SEQUENCEs. */
3193
3194rtx
3195next_nonnote_nondebug_insn (rtx insn)
3196{
3197 while (insn)
3198 {
3199 insn = NEXT_INSN (insn);
3200 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3201 break;
3202 }
3203
3204 return insn;
3205}
3206
3207/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3208 This routine does not look inside SEQUENCEs. */
3209
3210rtx
3211prev_nonnote_nondebug_insn (rtx insn)
3212{
3213 while (insn)
3214 {
3215 insn = PREV_INSN (insn);
3216 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3217 break;
3218 }
3219
3220 return insn;
3221}
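
/* Illustrative sketch, not part of the original file and kept out of the
   build: counting the real instructions of the current function by using
   the walkers above to skip notes and debug insns.  */
#if 0
static int
example_count_real_insns (void)
{
  int count = 0;
  rtx insn;

  for (insn = get_insns (); insn; insn = next_nonnote_nondebug_insn (insn))
    if (INSN_P (insn))
      count++;
  return count;
}
#endif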
3222
23b2ce53
RS
3223/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3224 or 0, if there is none. This routine does not look inside
0f41302f 3225 SEQUENCEs. */
23b2ce53
RS
3226
3227rtx
502b8322 3228next_real_insn (rtx insn)
23b2ce53 3229{
75547801
KG
3230 while (insn)
3231 {
3232 insn = NEXT_INSN (insn);
3233 if (insn == 0 || INSN_P (insn))
3234 break;
3235 }
23b2ce53 3236
75547801 3237 return insn;
23b2ce53
RS
3238}
3239
3240/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3241 or 0, if there is none. This routine does not look inside
3242 SEQUENCEs. */
3243
3244rtx
502b8322 3245prev_real_insn (rtx insn)
23b2ce53 3246{
75547801
KG
3247 while (insn)
3248 {
3249 insn = PREV_INSN (insn);
3250 if (insn == 0 || INSN_P (insn))
3251 break;
3252 }
23b2ce53 3253
75547801 3254 return insn;
23b2ce53
RS
3255}
3256
ee960939
OH
3257/* Return the last CALL_INSN in the current list, or 0 if there is none.
3258 This routine does not look inside SEQUENCEs. */
3259
3260rtx
502b8322 3261last_call_insn (void)
ee960939
OH
3262{
3263 rtx insn;
3264
3265 for (insn = get_last_insn ();
4b4bf941 3266 insn && !CALL_P (insn);
ee960939
OH
3267 insn = PREV_INSN (insn))
3268 ;
3269
3270 return insn;
3271}
3272
23b2ce53 3273/* Find the next insn after INSN that really does something. This routine
9c517bf3
AK
3274 does not look inside SEQUENCEs. After reload this also skips over
 3275 standalone USE and CLOBBER insns. */
23b2ce53 3276
69732dcb 3277int
4f588890 3278active_insn_p (const_rtx insn)
69732dcb 3279{
4b4bf941
JQ
3280 return (CALL_P (insn) || JUMP_P (insn)
3281 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3282 && (! reload_completed
3283 || (GET_CODE (PATTERN (insn)) != USE
3284 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3285}
3286
23b2ce53 3287rtx
502b8322 3288next_active_insn (rtx insn)
23b2ce53 3289{
75547801
KG
3290 while (insn)
3291 {
3292 insn = NEXT_INSN (insn);
3293 if (insn == 0 || active_insn_p (insn))
3294 break;
3295 }
23b2ce53 3296
75547801 3297 return insn;
23b2ce53
RS
3298}
3299
3300/* Find the last insn before INSN that really does something. This routine
9c517bf3
AK
3301 does not look inside SEQUENCEs. After reload this also skips over
 3302 standalone USE and CLOBBER insns. */
23b2ce53
RS
3303
3304rtx
502b8322 3305prev_active_insn (rtx insn)
23b2ce53 3306{
75547801
KG
3307 while (insn)
3308 {
3309 insn = PREV_INSN (insn);
3310 if (insn == 0 || active_insn_p (insn))
3311 break;
3312 }
23b2ce53 3313
75547801 3314 return insn;
23b2ce53
RS
3315}
3316
3317/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3318
3319rtx
502b8322 3320next_label (rtx insn)
23b2ce53 3321{
75547801
KG
3322 while (insn)
3323 {
3324 insn = NEXT_INSN (insn);
3325 if (insn == 0 || LABEL_P (insn))
3326 break;
3327 }
23b2ce53 3328
75547801 3329 return insn;
23b2ce53
RS
3330}
3331
dc0ff1c8
BS
3332/* Return the last label to mark the same position as LABEL. Return LABEL
3333 itself if it is null or any return rtx. */
6c2511d3
RS
3334
3335rtx
3336skip_consecutive_labels (rtx label)
3337{
3338 rtx insn;
3339
dc0ff1c8
BS
3340 if (label && ANY_RETURN_P (label))
3341 return label;
3342
6c2511d3
RS
3343 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3344 if (LABEL_P (insn))
3345 label = insn;
3346
3347 return label;
3348}
23b2ce53
RS
3349\f
3350#ifdef HAVE_cc0
c572e5ba
JVA
3351/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3352 and REG_CC_USER notes so we can find it. */
3353
3354void
502b8322 3355link_cc0_insns (rtx insn)
c572e5ba
JVA
3356{
3357 rtx user = next_nonnote_insn (insn);
3358
4b4bf941 3359 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
c572e5ba
JVA
3360 user = XVECEXP (PATTERN (user), 0, 0);
3361
65c5f2a6
ILT
3362 add_reg_note (user, REG_CC_SETTER, insn);
3363 add_reg_note (insn, REG_CC_USER, user);
c572e5ba
JVA
3364}
3365
23b2ce53
RS
3366/* Return the next insn that uses CC0 after INSN, which is assumed to
3367 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3368 applied to the result of this function should yield INSN).
3369
3370 Normally, this is simply the next insn. However, if a REG_CC_USER note
3371 is present, it contains the insn that uses CC0.
3372
3373 Return 0 if we can't find the insn. */
3374
3375rtx
502b8322 3376next_cc0_user (rtx insn)
23b2ce53 3377{
906c4e36 3378 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3379
3380 if (note)
3381 return XEXP (note, 0);
3382
3383 insn = next_nonnote_insn (insn);
4b4bf941 3384 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
23b2ce53
RS
3385 insn = XVECEXP (PATTERN (insn), 0, 0);
3386
2c3c49de 3387 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3388 return insn;
3389
3390 return 0;
3391}
3392
3393/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3394 note, it is the previous insn. */
3395
3396rtx
502b8322 3397prev_cc0_setter (rtx insn)
23b2ce53 3398{
906c4e36 3399 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3400
3401 if (note)
3402 return XEXP (note, 0);
3403
3404 insn = prev_nonnote_insn (insn);
5b0264cb 3405 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3406
3407 return insn;
3408}
3409#endif
e5bef2e4 3410
594f8779
RZ
3411#ifdef AUTO_INC_DEC
3412/* Find a RTX_AUTOINC class rtx which matches DATA. */
3413
3414static int
3415find_auto_inc (rtx *xp, void *data)
3416{
3417 rtx x = *xp;
5ead67f6 3418 rtx reg = (rtx) data;
594f8779
RZ
3419
3420 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3421 return 0;
3422
3423 switch (GET_CODE (x))
3424 {
3425 case PRE_DEC:
3426 case PRE_INC:
3427 case POST_DEC:
3428 case POST_INC:
3429 case PRE_MODIFY:
3430 case POST_MODIFY:
3431 if (rtx_equal_p (reg, XEXP (x, 0)))
3432 return 1;
3433 break;
3434
3435 default:
3436 gcc_unreachable ();
3437 }
3438 return -1;
3439}
3440#endif
3441
e5bef2e4
HB
3442/* Increment the label uses for all labels present in rtx. */
3443
3444static void
502b8322 3445mark_label_nuses (rtx x)
e5bef2e4 3446{
b3694847
SS
3447 enum rtx_code code;
3448 int i, j;
3449 const char *fmt;
e5bef2e4
HB
3450
3451 code = GET_CODE (x);
7537fc90 3452 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3453 LABEL_NUSES (XEXP (x, 0))++;
3454
3455 fmt = GET_RTX_FORMAT (code);
3456 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3457 {
3458 if (fmt[i] == 'e')
0fb7aeda 3459 mark_label_nuses (XEXP (x, i));
e5bef2e4 3460 else if (fmt[i] == 'E')
0fb7aeda 3461 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3462 mark_label_nuses (XVECEXP (x, i, j));
3463 }
3464}
3465
23b2ce53
RS
3466\f
3467/* Try splitting insns that can be split for better scheduling.
3468 PAT is the pattern which might be split.
3469 TRIAL is the insn providing PAT.
cc2902df 3470 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3471
3472 If this routine succeeds in splitting, it returns the first or last
11147ebe 3473 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3474 returns TRIAL. If the insn to be returned can be split, it will be. */
3475
3476rtx
502b8322 3477try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3478{
3479 rtx before = PREV_INSN (trial);
3480 rtx after = NEXT_INSN (trial);
23b2ce53 3481 int has_barrier = 0;
4a8cae83 3482 rtx note, seq, tem;
6b24c259 3483 int probability;
599aedd9
RH
3484 rtx insn_last, insn;
3485 int njumps = 0;
6b24c259 3486
cd9c1ca8
RH
3487 /* We're not good at redistributing frame information. */
3488 if (RTX_FRAME_RELATED_P (trial))
3489 return trial;
3490
6b24c259
JH
3491 if (any_condjump_p (trial)
3492 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3493 split_branch_probability = INTVAL (XEXP (note, 0));
3494 probability = split_branch_probability;
3495
3496 seq = split_insns (pat, trial);
3497
3498 split_branch_probability = -1;
23b2ce53
RS
3499
3500 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3501 We may need to handle this specially. */
4b4bf941 3502 if (after && BARRIER_P (after))
23b2ce53
RS
3503 {
3504 has_barrier = 1;
3505 after = NEXT_INSN (after);
3506 }
3507
599aedd9
RH
3508 if (!seq)
3509 return trial;
3510
3511 /* Avoid infinite loop if any insn of the result matches
3512 the original pattern. */
3513 insn_last = seq;
3514 while (1)
23b2ce53 3515 {
599aedd9
RH
3516 if (INSN_P (insn_last)
3517 && rtx_equal_p (PATTERN (insn_last), pat))
3518 return trial;
3519 if (!NEXT_INSN (insn_last))
3520 break;
3521 insn_last = NEXT_INSN (insn_last);
3522 }
750c9258 3523
6fb5fa3c
DB
3524 /* We will be adding the new sequence to the function. The splitters
3525 may have introduced invalid RTL sharing, so unshare the sequence now. */
3526 unshare_all_rtl_in_chain (seq);
3527
599aedd9
RH
3528 /* Mark labels. */
3529 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3530 {
4b4bf941 3531 if (JUMP_P (insn))
599aedd9
RH
3532 {
3533 mark_jump_label (PATTERN (insn), insn, 0);
3534 njumps++;
3535 if (probability != -1
3536 && any_condjump_p (insn)
3537 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3538 {
599aedd9
RH
3539 /* We can preserve the REG_BR_PROB notes only if exactly
3540 one jump is created, otherwise the machine description
3541 is responsible for this step using
3542 the split_branch_probability variable. */
5b0264cb 3543 gcc_assert (njumps == 1);
65c5f2a6 3544 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
2f937369 3545 }
599aedd9
RH
3546 }
3547 }
3548
3549 /* If we are splitting a CALL_INSN, look for the CALL_INSN
65712d5c 3550 in SEQ and copy any additional information across. */
4b4bf941 3551 if (CALL_P (trial))
599aedd9
RH
3552 {
3553 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3554 if (CALL_P (insn))
599aedd9 3555 {
65712d5c
RS
3556 rtx next, *p;
3557
3558 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3559 target may have explicitly specified. */
3560 p = &CALL_INSN_FUNCTION_USAGE (insn);
f6a1f3f6
RH
3561 while (*p)
3562 p = &XEXP (*p, 1);
3563 *p = CALL_INSN_FUNCTION_USAGE (trial);
65712d5c
RS
3564
3565 /* If the old call was a sibling call, the new one must
3566 be too. */
599aedd9 3567 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
65712d5c
RS
3568
3569 /* If the new call is the last instruction in the sequence,
3570 it will effectively replace the old call in-situ. Otherwise
3571 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3572 so that it comes immediately after the new call. */
3573 if (NEXT_INSN (insn))
65f3dedb
RS
3574 for (next = NEXT_INSN (trial);
3575 next && NOTE_P (next);
3576 next = NEXT_INSN (next))
3577 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
65712d5c
RS
3578 {
3579 remove_insn (next);
3580 add_insn_after (next, insn, NULL);
65f3dedb 3581 break;
65712d5c 3582 }
599aedd9
RH
3583 }
3584 }
4b5e8abe 3585
599aedd9
RH
3586 /* Copy notes, particularly those related to the CFG. */
3587 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3588 {
3589 switch (REG_NOTE_KIND (note))
3590 {
3591 case REG_EH_REGION:
1d65f45c 3592 copy_reg_eh_region_note_backward (note, insn_last, NULL);
599aedd9 3593 break;
216183ce 3594
599aedd9
RH
3595 case REG_NORETURN:
3596 case REG_SETJMP:
0a35513e 3597 case REG_TM:
594f8779 3598 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
216183ce 3599 {
4b4bf941 3600 if (CALL_P (insn))
65c5f2a6 3601 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
216183ce 3602 }
599aedd9 3603 break;
d6e95df8 3604
599aedd9 3605 case REG_NON_LOCAL_GOTO:
594f8779 3606 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
2f937369 3607 {
4b4bf941 3608 if (JUMP_P (insn))
65c5f2a6 3609 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2f937369 3610 }
599aedd9 3611 break;
e5bef2e4 3612
594f8779
RZ
3613#ifdef AUTO_INC_DEC
3614 case REG_INC:
3615 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3616 {
3617 rtx reg = XEXP (note, 0);
3618 if (!FIND_REG_INC_NOTE (insn, reg)
3619 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
65c5f2a6 3620 add_reg_note (insn, REG_INC, reg);
594f8779
RZ
3621 }
3622 break;
3623#endif
3624
9a08d230
RH
3625 case REG_ARGS_SIZE:
3626 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3627 break;
3628
599aedd9
RH
3629 default:
3630 break;
23b2ce53 3631 }
599aedd9
RH
3632 }
3633
3634 /* If there are LABELS inside the split insns increment the
3635 usage count so we don't delete the label. */
cf7c4aa6 3636 if (INSN_P (trial))
599aedd9
RH
3637 {
3638 insn = insn_last;
3639 while (insn != NULL_RTX)
23b2ce53 3640 {
cf7c4aa6 3641 /* JUMP_P insns have already been "marked" above. */
4b4bf941 3642 if (NONJUMP_INSN_P (insn))
599aedd9 3643 mark_label_nuses (PATTERN (insn));
23b2ce53 3644
599aedd9
RH
3645 insn = PREV_INSN (insn);
3646 }
23b2ce53
RS
3647 }
3648
0435312e 3649 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3650
3651 delete_insn (trial);
3652 if (has_barrier)
3653 emit_barrier_after (tem);
3654
3655 /* Recursively call try_split for each new insn created; by the
3656 time control returns here that insn will be fully split, so
3657 set LAST and continue from the insn after the one returned.
3658 We can't use next_active_insn here since AFTER may be a note.
3659 Ignore deleted insns, which can occur if not optimizing. */
3660 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3661 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3662 tem = try_split (PATTERN (tem), tem, 1);
3663
3664 /* Return either the first or the last insn, depending on which was
3665 requested. */
3666 return last
5936d944 3667 ? (after ? PREV_INSN (after) : get_last_insn ())
599aedd9 3668 : NEXT_INSN (before);
23b2ce53
RS
3669}
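
/* An illustrative sketch of a typical call (INSN here is a hypothetical
   insn already in the chain whose pattern has a matching splitter):

	rtx last = try_split (PATTERN (insn), insn, 1);

   If the pattern could not be split, LAST is simply INSN again; otherwise
   it is the last insn of the replacement sequence.  */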
3670\f
3671/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3672 Store PATTERN in the pattern slots. */
23b2ce53
RS
3673
3674rtx
502b8322 3675make_insn_raw (rtx pattern)
23b2ce53 3676{
b3694847 3677 rtx insn;
23b2ce53 3678
1f8f4a0b 3679 insn = rtx_alloc (INSN);
23b2ce53 3680
43127294 3681 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3682 PATTERN (insn) = pattern;
3683 INSN_CODE (insn) = -1;
1632afca 3684 REG_NOTES (insn) = NULL;
55e092c4 3685 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3686 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3687
47984720
NC
3688#ifdef ENABLE_RTL_CHECKING
3689 if (insn
2c3c49de 3690 && INSN_P (insn)
47984720
NC
3691 && (returnjump_p (insn)
3692 || (GET_CODE (insn) == SET
3693 && SET_DEST (insn) == pc_rtx)))
3694 {
d4ee4d25 3695 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3696 debug_rtx (insn);
3697 }
3698#endif
750c9258 3699
23b2ce53
RS
3700 return insn;
3701}
3702
b5b8b0ac
AO
3703/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3704
3705rtx
3706make_debug_insn_raw (rtx pattern)
3707{
3708 rtx insn;
3709
3710 insn = rtx_alloc (DEBUG_INSN);
3711 INSN_UID (insn) = cur_debug_insn_uid++;
3712 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3713 INSN_UID (insn) = cur_insn_uid++;
3714
3715 PATTERN (insn) = pattern;
3716 INSN_CODE (insn) = -1;
3717 REG_NOTES (insn) = NULL;
3718 INSN_LOCATOR (insn) = curr_insn_locator ();
3719 BLOCK_FOR_INSN (insn) = NULL;
3720
3721 return insn;
3722}
3723
2f937369 3724/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3725
38109dab 3726rtx
502b8322 3727make_jump_insn_raw (rtx pattern)
23b2ce53 3728{
b3694847 3729 rtx insn;
23b2ce53 3730
4b1f5e8c 3731 insn = rtx_alloc (JUMP_INSN);
1632afca 3732 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3733
3734 PATTERN (insn) = pattern;
3735 INSN_CODE (insn) = -1;
1632afca
RS
3736 REG_NOTES (insn) = NULL;
3737 JUMP_LABEL (insn) = NULL;
55e092c4 3738 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3739 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3740
3741 return insn;
3742}
aff507f4 3743
2f937369 3744/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3745
3746static rtx
502b8322 3747make_call_insn_raw (rtx pattern)
aff507f4 3748{
b3694847 3749 rtx insn;
aff507f4
RK
3750
3751 insn = rtx_alloc (CALL_INSN);
3752 INSN_UID (insn) = cur_insn_uid++;
3753
3754 PATTERN (insn) = pattern;
3755 INSN_CODE (insn) = -1;
aff507f4
RK
3756 REG_NOTES (insn) = NULL;
3757 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
55e092c4 3758 INSN_LOCATOR (insn) = curr_insn_locator ();
ba4f7968 3759 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3760
3761 return insn;
3762}
23b2ce53
RS
3763\f
3764/* Add INSN to the end of the doubly-linked list.
3765 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3766
3767void
502b8322 3768add_insn (rtx insn)
23b2ce53 3769{
5936d944 3770 PREV_INSN (insn) = get_last_insn();
23b2ce53
RS
3771 NEXT_INSN (insn) = 0;
3772
5936d944
JH
3773 if (NULL != get_last_insn())
3774 NEXT_INSN (get_last_insn ()) = insn;
23b2ce53 3775
5936d944
JH
3776 if (NULL == get_insns ())
3777 set_first_insn (insn);
23b2ce53 3778
5936d944 3779 set_last_insn (insn);
23b2ce53
RS
3780}
3781
a0ae8e8d
RK
3782/* Add INSN into the doubly-linked list after insn AFTER. This and
3783 the next should be the only functions called to insert an insn once
ba213285 3784 delay slots have been filled since only they know how to update a
a0ae8e8d 3785 SEQUENCE. */
23b2ce53
RS
3786
3787void
6fb5fa3c 3788add_insn_after (rtx insn, rtx after, basic_block bb)
23b2ce53
RS
3789{
3790 rtx next = NEXT_INSN (after);
3791
5b0264cb 3792 gcc_assert (!optimize || !INSN_DELETED_P (after));
ba213285 3793
23b2ce53
RS
3794 NEXT_INSN (insn) = next;
3795 PREV_INSN (insn) = after;
3796
3797 if (next)
3798 {
3799 PREV_INSN (next) = insn;
4b4bf941 3800 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
23b2ce53
RS
3801 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3802 }
5936d944
JH
3803 else if (get_last_insn () == after)
3804 set_last_insn (insn);
23b2ce53
RS
3805 else
3806 {
49ad7cfa 3807 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3808 /* Scan all pending sequences too. */
3809 for (; stack; stack = stack->next)
3810 if (after == stack->last)
fef0509b
RK
3811 {
3812 stack->last = insn;
3813 break;
3814 }
a0ae8e8d 3815
5b0264cb 3816 gcc_assert (stack);
23b2ce53
RS
3817 }
3818
4b4bf941
JQ
3819 if (!BARRIER_P (after)
3820 && !BARRIER_P (insn)
3c030e88
JH
3821 && (bb = BLOCK_FOR_INSN (after)))
3822 {
3823 set_block_for_insn (insn, bb);
38c1593d 3824 if (INSN_P (insn))
6fb5fa3c 3825 df_insn_rescan (insn);
3c030e88 3826 /* Should not happen, as the first insn in the BB is always
a1f300c0 3827 either a NOTE or a LABEL. */
a813c111 3828 if (BB_END (bb) == after
3c030e88 3829 /* Avoid clobbering of structure when creating new BB. */
4b4bf941 3830 && !BARRIER_P (insn)
a38e7aa5 3831 && !NOTE_INSN_BASIC_BLOCK_P (insn))
a813c111 3832 BB_END (bb) = insn;
3c030e88
JH
3833 }
3834
23b2ce53 3835 NEXT_INSN (after) = insn;
4b4bf941 3836 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
23b2ce53
RS
3837 {
3838 rtx sequence = PATTERN (after);
3839 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3840 }
3841}
3842
a0ae8e8d 3843/* Add INSN into the doubly-linked list before insn BEFORE. This and
6fb5fa3c
DB
3844 the previous should be the only functions called to insert an insn
3845 once delay slots have been filled since only they know how to
3846 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3847 bb from before. */
a0ae8e8d
RK
3848
3849void
6fb5fa3c 3850add_insn_before (rtx insn, rtx before, basic_block bb)
a0ae8e8d
RK
3851{
3852 rtx prev = PREV_INSN (before);
3853
5b0264cb 3854 gcc_assert (!optimize || !INSN_DELETED_P (before));
ba213285 3855
a0ae8e8d
RK
3856 PREV_INSN (insn) = prev;
3857 NEXT_INSN (insn) = before;
3858
3859 if (prev)
3860 {
3861 NEXT_INSN (prev) = insn;
4b4bf941 3862 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
a0ae8e8d
RK
3863 {
3864 rtx sequence = PATTERN (prev);
3865 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3866 }
3867 }
5936d944
JH
3868 else if (get_insns () == before)
3869 set_first_insn (insn);
a0ae8e8d
RK
3870 else
3871 {
49ad7cfa 3872 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3873 /* Scan all pending sequences too. */
3874 for (; stack; stack = stack->next)
3875 if (before == stack->first)
fef0509b
RK
3876 {
3877 stack->first = insn;
3878 break;
3879 }
a0ae8e8d 3880
5b0264cb 3881 gcc_assert (stack);
a0ae8e8d
RK
3882 }
3883
b8698a0f 3884 if (!bb
6fb5fa3c
DB
3885 && !BARRIER_P (before)
3886 && !BARRIER_P (insn))
3887 bb = BLOCK_FOR_INSN (before);
3888
3889 if (bb)
3c030e88
JH
3890 {
3891 set_block_for_insn (insn, bb);
38c1593d 3892 if (INSN_P (insn))
6fb5fa3c 3893 df_insn_rescan (insn);
5b0264cb 3894 /* Should not happen, as the first insn in the BB is always either a
43e05e45 3895 NOTE or a LABEL. */
5b0264cb
NS
3896 gcc_assert (BB_HEAD (bb) != insn
3897 /* Avoid clobbering of structure when creating new BB. */
3898 || BARRIER_P (insn)
a38e7aa5 3899 || NOTE_INSN_BASIC_BLOCK_P (insn));
3c030e88
JH
3900 }
3901
a0ae8e8d 3902 PREV_INSN (before) = insn;
4b4bf941 3903 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
a0ae8e8d
RK
3904 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3905}
3906
6fb5fa3c
DB
3907
3908/* Replace INSN with a deleted instruction note. */
3909
0ce2b299
EB
3910void
3911set_insn_deleted (rtx insn)
6fb5fa3c
DB
3912{
3913 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3914 PUT_CODE (insn, NOTE);
3915 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3916}
3917
3918
89e99eea
DB
3919/* Remove an insn from its doubly-linked list. This function knows how
3920 to handle sequences. */
3921void
502b8322 3922remove_insn (rtx insn)
89e99eea
DB
3923{
3924 rtx next = NEXT_INSN (insn);
3925 rtx prev = PREV_INSN (insn);
53c17031
JH
3926 basic_block bb;
3927
6fb5fa3c
DB
3928 /* Later in the code, the block will be marked dirty. */
3929 df_insn_delete (NULL, INSN_UID (insn));
3930
89e99eea
DB
3931 if (prev)
3932 {
3933 NEXT_INSN (prev) = next;
4b4bf941 3934 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
89e99eea
DB
3935 {
3936 rtx sequence = PATTERN (prev);
3937 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3938 }
3939 }
5936d944
JH
3940 else if (get_insns () == insn)
3941 {
fb9ef4c1
JH
3942 if (next)
3943 PREV_INSN (next) = NULL;
5936d944
JH
3944 set_first_insn (next);
3945 }
89e99eea
DB
3946 else
3947 {
49ad7cfa 3948 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3949 /* Scan all pending sequences too. */
3950 for (; stack; stack = stack->next)
3951 if (insn == stack->first)
3952 {
3953 stack->first = next;
3954 break;
3955 }
3956
5b0264cb 3957 gcc_assert (stack);
89e99eea
DB
3958 }
3959
3960 if (next)
3961 {
3962 PREV_INSN (next) = prev;
4b4bf941 3963 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
89e99eea
DB
3964 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3965 }
5936d944
JH
3966 else if (get_last_insn () == insn)
3967 set_last_insn (prev);
89e99eea
DB
3968 else
3969 {
49ad7cfa 3970 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3971 /* Scan all pending sequences too. */
3972 for (; stack; stack = stack->next)
3973 if (insn == stack->last)
3974 {
3975 stack->last = prev;
3976 break;
3977 }
3978
5b0264cb 3979 gcc_assert (stack);
89e99eea 3980 }
4b4bf941 3981 if (!BARRIER_P (insn)
53c17031
JH
3982 && (bb = BLOCK_FOR_INSN (insn)))
3983 {
4e0084e4 3984 if (NONDEBUG_INSN_P (insn))
6fb5fa3c 3985 df_set_bb_dirty (bb);
a813c111 3986 if (BB_HEAD (bb) == insn)
53c17031 3987 {
3bf1e984
RK
3988 /* Never ever delete the basic block note without deleting whole
3989 basic block. */
5b0264cb 3990 gcc_assert (!NOTE_P (insn));
a813c111 3991 BB_HEAD (bb) = next;
53c17031 3992 }
a813c111
SB
3993 if (BB_END (bb) == insn)
3994 BB_END (bb) = prev;
53c17031 3995 }
89e99eea
DB
3996}
3997
ee960939
OH
3998/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3999
4000void
502b8322 4001add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 4002{
5b0264cb 4003 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
4004
4005 /* Put the register usage information on the CALL. If there is already
4006 some usage information, put ours at the end. */
4007 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4008 {
4009 rtx link;
4010
4011 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4012 link = XEXP (link, 1))
4013 ;
4014
4015 XEXP (link, 1) = call_fusage;
4016 }
4017 else
4018 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4019}
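
/* An illustrative sketch of the usual calling pattern (CALL_INSN and
   ARG_REG are hypothetical): accumulate the usage list with use_reg and
   then attach it to the emitted call.

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, arg_reg);
	add_function_usage_to (call_insn, call_fusage);
*/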
4020
23b2ce53
RS
4021/* Delete all insns made since FROM.
4022 FROM becomes the new last instruction. */
4023
4024void
502b8322 4025delete_insns_since (rtx from)
23b2ce53
RS
4026{
4027 if (from == 0)
5936d944 4028 set_first_insn (0);
23b2ce53
RS
4029 else
4030 NEXT_INSN (from) = 0;
5936d944 4031 set_last_insn (from);
23b2ce53
RS
4032}
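
/* A sketch of the common backtracking idiom this supports (FAILED stands
   for whatever success test the caller uses):

	rtx last = get_last_insn ();
	... emit some tentative insns ...
	if (failed)
	  delete_insns_since (last);
*/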
4033
5dab5552
MS
4034/* This function is deprecated; please use sequences instead.
4035
4036 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
4037 The insns to be moved are those between FROM and TO.
4038 They are moved to a new position after the insn AFTER.
4039 AFTER must not be FROM or TO or any insn in between.
4040
4041 This function does not know about SEQUENCEs and hence should not be
4042 called after delay-slot filling has been done. */
4043
4044void
502b8322 4045reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53 4046{
4f8344eb
HPN
4047#ifdef ENABLE_CHECKING
4048 rtx x;
4049 for (x = from; x != to; x = NEXT_INSN (x))
4050 gcc_assert (after != x);
4051 gcc_assert (after != to);
4052#endif
4053
23b2ce53
RS
4054 /* Splice this bunch out of where it is now. */
4055 if (PREV_INSN (from))
4056 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4057 if (NEXT_INSN (to))
4058 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
5936d944
JH
4059 if (get_last_insn () == to)
4060 set_last_insn (PREV_INSN (from));
4061 if (get_insns () == from)
4062 set_first_insn (NEXT_INSN (to));
23b2ce53
RS
4063
4064 /* Make the new neighbors point to it and it to them. */
4065 if (NEXT_INSN (after))
4066 PREV_INSN (NEXT_INSN (after)) = to;
4067
4068 NEXT_INSN (to) = NEXT_INSN (after);
4069 PREV_INSN (from) = after;
4070 NEXT_INSN (after) = from;
5936d944
JH
4071 if (after == get_last_insn())
4072 set_last_insn (to);
23b2ce53
RS
4073}
4074
3c030e88
JH
4075/* Same as function above, but take care to update BB boundaries. */
4076void
502b8322 4077reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
4078{
4079 rtx prev = PREV_INSN (from);
4080 basic_block bb, bb2;
4081
4082 reorder_insns_nobb (from, to, after);
4083
4b4bf941 4084 if (!BARRIER_P (after)
3c030e88
JH
4085 && (bb = BLOCK_FOR_INSN (after)))
4086 {
4087 rtx x;
6fb5fa3c 4088 df_set_bb_dirty (bb);
68252e27 4089
4b4bf941 4090 if (!BARRIER_P (from)
3c030e88
JH
4091 && (bb2 = BLOCK_FOR_INSN (from)))
4092 {
a813c111
SB
4093 if (BB_END (bb2) == to)
4094 BB_END (bb2) = prev;
6fb5fa3c 4095 df_set_bb_dirty (bb2);
3c030e88
JH
4096 }
4097
a813c111
SB
4098 if (BB_END (bb) == after)
4099 BB_END (bb) = to;
3c030e88
JH
4100
4101 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
7bd5ed5c 4102 if (!BARRIER_P (x))
63642d5a 4103 df_insn_change_bb (x, bb);
3c030e88
JH
4104 }
4105}
4106
23b2ce53 4107\f
2f937369
DM
4108/* Emit insn(s) of given code and pattern
4109 at a specified place within the doubly-linked list.
23b2ce53 4110
2f937369
DM
4111 All of the emit_foo global entry points accept an object
4112 X which is either an insn list or a PATTERN of a single
4113 instruction.
23b2ce53 4114
2f937369
DM
4115 There are thus a few canonical ways to generate code and
4116 emit it at a specific place in the instruction stream. For
4117 example, consider the instruction named SPOT and the fact that
4118 we would like to emit some instructions before SPOT. We might
4119 do it like this:
23b2ce53 4120
2f937369
DM
4121 start_sequence ();
4122 ... emit the new instructions ...
4123 insns_head = get_insns ();
4124 end_sequence ();
23b2ce53 4125
2f937369 4126 emit_insn_before (insns_head, SPOT);
23b2ce53 4127
2f937369
DM
4128 It used to be common to generate SEQUENCE rtl instead, but that
4129 is a relic of the past which no longer occurs. The reason is that
4130 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4131 generated would almost certainly die right after it was created. */
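
/* For a single instruction, a bare PATTERN may be passed instead of a
   list; an illustrative sketch (DEST, SRC and SPOT are hypothetical):

	emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), SPOT);

   which makes the insn via make_insn_raw and links it in before SPOT.  */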
23b2ce53 4132
5f02387d
NF
4133static rtx
4134emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4135 rtx (*make_raw) (rtx))
23b2ce53 4136{
b3694847 4137 rtx insn;
23b2ce53 4138
5b0264cb 4139 gcc_assert (before);
2f937369
DM
4140
4141 if (x == NULL_RTX)
4142 return last;
4143
4144 switch (GET_CODE (x))
23b2ce53 4145 {
b5b8b0ac 4146 case DEBUG_INSN:
2f937369
DM
4147 case INSN:
4148 case JUMP_INSN:
4149 case CALL_INSN:
4150 case CODE_LABEL:
4151 case BARRIER:
4152 case NOTE:
4153 insn = x;
4154 while (insn)
4155 {
4156 rtx next = NEXT_INSN (insn);
6fb5fa3c 4157 add_insn_before (insn, before, bb);
2f937369
DM
4158 last = insn;
4159 insn = next;
4160 }
4161 break;
4162
4163#ifdef ENABLE_RTL_CHECKING
4164 case SEQUENCE:
5b0264cb 4165 gcc_unreachable ();
2f937369
DM
4166 break;
4167#endif
4168
4169 default:
5f02387d 4170 last = (*make_raw) (x);
6fb5fa3c 4171 add_insn_before (last, before, bb);
2f937369 4172 break;
23b2ce53
RS
4173 }
4174
2f937369 4175 return last;
23b2ce53
RS
4176}
4177
5f02387d
NF
4178/* Make X be output before the instruction BEFORE. */
4179
4180rtx
4181emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4182{
4183 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4184}
4185
2f937369 4186/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4187 and output it before the instruction BEFORE. */
4188
4189rtx
a7102479 4190emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 4191{
5f02387d
NF
4192 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4193 make_jump_insn_raw);
23b2ce53
RS
4194}
4195
2f937369 4196/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4197 and output it before the instruction BEFORE. */
4198
4199rtx
a7102479 4200emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4201{
5f02387d
NF
4202 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4203 make_call_insn_raw);
969d70ca
JH
4204}
4205
b5b8b0ac
AO
4206/* Make an instruction with body X and code DEBUG_INSN
4207 and output it before the instruction BEFORE. */
4208
4209rtx
4210emit_debug_insn_before_noloc (rtx x, rtx before)
4211{
5f02387d
NF
4212 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4213 make_debug_insn_raw);
b5b8b0ac
AO
4214}
4215
23b2ce53 4216/* Make an insn of code BARRIER
e881bb1b 4217 and output it before the insn BEFORE. */
23b2ce53
RS
4218
4219rtx
502b8322 4220emit_barrier_before (rtx before)
23b2ce53 4221{
b3694847 4222 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4223
4224 INSN_UID (insn) = cur_insn_uid++;
4225
6fb5fa3c 4226 add_insn_before (insn, before, NULL);
23b2ce53
RS
4227 return insn;
4228}
4229
e881bb1b
RH
4230/* Emit the label LABEL before the insn BEFORE. */
4231
4232rtx
502b8322 4233emit_label_before (rtx label, rtx before)
e881bb1b
RH
4234{
4235 /* This can be called twice for the same label as a result of the
4236 confusion that follows a syntax error! So make it harmless. */
4237 if (INSN_UID (label) == 0)
4238 {
4239 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4240 add_insn_before (label, before, NULL);
e881bb1b
RH
4241 }
4242
4243 return label;
4244}
4245
23b2ce53
RS
4246/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4247
4248rtx
a38e7aa5 4249emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4250{
b3694847 4251 rtx note = rtx_alloc (NOTE);
23b2ce53 4252 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4253 NOTE_KIND (note) = subtype;
ba4f7968 4254 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4255 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4256
6fb5fa3c 4257 add_insn_before (note, before, NULL);
23b2ce53
RS
4258 return note;
4259}
4260\f
2f937369
DM
4261/* Helper for emit_insn_after, handles lists of instructions
4262 efficiently. */
23b2ce53 4263
2f937369 4264static rtx
6fb5fa3c 4265emit_insn_after_1 (rtx first, rtx after, basic_block bb)
23b2ce53 4266{
2f937369
DM
4267 rtx last;
4268 rtx after_after;
6fb5fa3c
DB
4269 if (!bb && !BARRIER_P (after))
4270 bb = BLOCK_FOR_INSN (after);
23b2ce53 4271
6fb5fa3c 4272 if (bb)
23b2ce53 4273 {
6fb5fa3c 4274 df_set_bb_dirty (bb);
2f937369 4275 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4b4bf941 4276 if (!BARRIER_P (last))
6fb5fa3c
DB
4277 {
4278 set_block_for_insn (last, bb);
4279 df_insn_rescan (last);
4280 }
4b4bf941 4281 if (!BARRIER_P (last))
6fb5fa3c
DB
4282 {
4283 set_block_for_insn (last, bb);
4284 df_insn_rescan (last);
4285 }
a813c111
SB
4286 if (BB_END (bb) == after)
4287 BB_END (bb) = last;
23b2ce53
RS
4288 }
4289 else
2f937369
DM
4290 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4291 continue;
4292
4293 after_after = NEXT_INSN (after);
4294
4295 NEXT_INSN (after) = first;
4296 PREV_INSN (first) = after;
4297 NEXT_INSN (last) = after_after;
4298 if (after_after)
4299 PREV_INSN (after_after) = last;
4300
5936d944
JH
4301 if (after == get_last_insn())
4302 set_last_insn (last);
e855c69d 4303
2f937369
DM
4304 return last;
4305}
4306
5f02387d
NF
4307static rtx
4308emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4309 rtx (*make_raw)(rtx))
2f937369
DM
4310{
4311 rtx last = after;
4312
5b0264cb 4313 gcc_assert (after);
2f937369
DM
4314
4315 if (x == NULL_RTX)
4316 return last;
4317
4318 switch (GET_CODE (x))
23b2ce53 4319 {
b5b8b0ac 4320 case DEBUG_INSN:
2f937369
DM
4321 case INSN:
4322 case JUMP_INSN:
4323 case CALL_INSN:
4324 case CODE_LABEL:
4325 case BARRIER:
4326 case NOTE:
6fb5fa3c 4327 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4328 break;
4329
4330#ifdef ENABLE_RTL_CHECKING
4331 case SEQUENCE:
5b0264cb 4332 gcc_unreachable ();
2f937369
DM
4333 break;
4334#endif
4335
4336 default:
5f02387d 4337 last = (*make_raw) (x);
6fb5fa3c 4338 add_insn_after (last, after, bb);
2f937369 4339 break;
23b2ce53
RS
4340 }
4341
2f937369 4342 return last;
23b2ce53
RS
4343}
4344
5f02387d
NF
4345/* Make X be output after the insn AFTER and set the BB of insn. If
4346 BB is NULL, an attempt is made to infer the BB from AFTER. */
4347
4348rtx
4349emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4350{
4351 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4352}
4353
255680cf 4354
2f937369 4355/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4356 and output it after the insn AFTER. */
4357
4358rtx
a7102479 4359emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4360{
5f02387d 4361 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
2f937369
DM
4362}
4363
4364/* Make an instruction with body X and code CALL_INSN
4365 and output it after the instruction AFTER. */
4366
4367rtx
a7102479 4368emit_call_insn_after_noloc (rtx x, rtx after)
2f937369 4369{
5f02387d 4370 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
23b2ce53
RS
4371}
4372
b5b8b0ac
AO
4373/* Make an instruction with body X and code CALL_INSN
4374 and output it after the instruction AFTER. */
4375
4376rtx
4377emit_debug_insn_after_noloc (rtx x, rtx after)
4378{
5f02387d 4379 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
b5b8b0ac
AO
4380}
4381
23b2ce53
RS
4382/* Make an insn of code BARRIER
4383 and output it after the insn AFTER. */
4384
4385rtx
502b8322 4386emit_barrier_after (rtx after)
23b2ce53 4387{
b3694847 4388 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4389
4390 INSN_UID (insn) = cur_insn_uid++;
4391
6fb5fa3c 4392 add_insn_after (insn, after, NULL);
23b2ce53
RS
4393 return insn;
4394}
4395
4396/* Emit the label LABEL after the insn AFTER. */
4397
4398rtx
502b8322 4399emit_label_after (rtx label, rtx after)
23b2ce53
RS
4400{
4401 /* This can be called twice for the same label
4402 as a result of the confusion that follows a syntax error!
4403 So make it harmless. */
4404 if (INSN_UID (label) == 0)
4405 {
4406 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4407 add_insn_after (label, after, NULL);
23b2ce53
RS
4408 }
4409
4410 return label;
4411}
4412
4413/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4414
4415rtx
a38e7aa5 4416emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4417{
b3694847 4418 rtx note = rtx_alloc (NOTE);
23b2ce53 4419 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4420 NOTE_KIND (note) = subtype;
ba4f7968 4421 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4422 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4423 add_insn_after (note, after, NULL);
23b2ce53
RS
4424 return note;
4425}
23b2ce53 4426\f
e8110d6f
NF
4427/* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4428 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4429
4430static rtx
4431emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4432 rtx (*make_raw) (rtx))
0d682900 4433{
e8110d6f 4434 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
0d682900 4435
a7102479 4436 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4437 return last;
4438
2f937369
DM
4439 after = NEXT_INSN (after);
4440 while (1)
4441 {
a7102479 4442 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4443 INSN_LOCATOR (after) = loc;
2f937369
DM
4444 if (after == last)
4445 break;
4446 after = NEXT_INSN (after);
4447 }
0d682900
JH
4448 return last;
4449}
4450
e8110d6f
NF
4451/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4452 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4453 any DEBUG_INSNs. */
4454
4455static rtx
4456emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4457 rtx (*make_raw) (rtx))
a7102479 4458{
b5b8b0ac
AO
4459 rtx prev = after;
4460
e8110d6f
NF
4461 if (skip_debug_insns)
4462 while (DEBUG_INSN_P (prev))
4463 prev = PREV_INSN (prev);
b5b8b0ac
AO
4464
4465 if (INSN_P (prev))
e8110d6f
NF
4466 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4467 make_raw);
a7102479 4468 else
e8110d6f 4469 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
a7102479
JH
4470}
4471
e8110d6f 4472/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
0d682900 4473rtx
e8110d6f 4474emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4475{
e8110d6f
NF
4476 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4477}
2f937369 4478
e8110d6f
NF
4479/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4480rtx
4481emit_insn_after (rtx pattern, rtx after)
4482{
4483 return emit_pattern_after (pattern, after, true, make_insn_raw);
4484}
dd3adcf8 4485
e8110d6f
NF
4486/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4487rtx
4488emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4489{
4490 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
0d682900
JH
4491}
4492
a7102479
JH
4493/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4494rtx
4495emit_jump_insn_after (rtx pattern, rtx after)
4496{
e8110d6f 4497 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
a7102479
JH
4498}
4499
e8110d6f 4500/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
0d682900 4501rtx
502b8322 4502emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4503{
e8110d6f 4504 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
0d682900
JH
4505}
4506
a7102479
JH
4507/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4508rtx
4509emit_call_insn_after (rtx pattern, rtx after)
4510{
e8110d6f 4511 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
a7102479
JH
4512}
4513
e8110d6f 4514/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
b5b8b0ac
AO
4515rtx
4516emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4517{
e8110d6f 4518 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
b5b8b0ac
AO
4519}
4520
4521/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4522rtx
4523emit_debug_insn_after (rtx pattern, rtx after)
4524{
e8110d6f 4525 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
b5b8b0ac
AO
4526}
4527
e8110d6f
NF
4528/* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4529 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4530 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4531 CALL_INSN, etc. */
4532
4533static rtx
4534emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4535 rtx (*make_raw) (rtx))
0d682900
JH
4536{
4537 rtx first = PREV_INSN (before);
e8110d6f
NF
4538 rtx last = emit_pattern_before_noloc (pattern, before,
4539 insnp ? before : NULL_RTX,
4540 NULL, make_raw);
a7102479
JH
4541
4542 if (pattern == NULL_RTX || !loc)
4543 return last;
4544
26cb3993
JH
4545 if (!first)
4546 first = get_insns ();
4547 else
4548 first = NEXT_INSN (first);
a7102479
JH
4549 while (1)
4550 {
4551 if (active_insn_p (first) && !INSN_LOCATOR (first))
4552 INSN_LOCATOR (first) = loc;
4553 if (first == last)
4554 break;
4555 first = NEXT_INSN (first);
4556 }
4557 return last;
4558}
4559
e8110d6f
NF
4560/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4561 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4562 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4563 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4564
4565static rtx
4566emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4567 bool insnp, rtx (*make_raw) (rtx))
a7102479 4568{
b5b8b0ac
AO
4569 rtx next = before;
4570
e8110d6f
NF
4571 if (skip_debug_insns)
4572 while (DEBUG_INSN_P (next))
4573 next = PREV_INSN (next);
b5b8b0ac
AO
4574
4575 if (INSN_P (next))
e8110d6f
NF
4576 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4577 insnp, make_raw);
a7102479 4578 else
e8110d6f
NF
4579 return emit_pattern_before_noloc (pattern, before,
4580 insnp ? before : NULL_RTX,
4581 NULL, make_raw);
a7102479
JH
4582}
4583
e8110d6f 4584/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
a7102479 4585rtx
e8110d6f 4586emit_insn_before_setloc (rtx pattern, rtx before, int loc)
a7102479 4587{
e8110d6f
NF
4588 return emit_pattern_before_setloc (pattern, before, loc, true,
4589 make_insn_raw);
4590}
a7102479 4591
e8110d6f
NF
4592/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4593rtx
4594emit_insn_before (rtx pattern, rtx before)
4595{
4596 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4597}
a7102479 4598
e8110d6f
NF
4599/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4600rtx
4601emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4602{
4603 return emit_pattern_before_setloc (pattern, before, loc, false,
4604 make_jump_insn_raw);
a7102479
JH
4605}
4606
4607/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4608rtx
4609emit_jump_insn_before (rtx pattern, rtx before)
4610{
e8110d6f
NF
4611 return emit_pattern_before (pattern, before, true, false,
4612 make_jump_insn_raw);
a7102479
JH
4613}
4614
e8110d6f 4615/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
a7102479
JH
4616rtx
4617emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4618{
e8110d6f
NF
4619 return emit_pattern_before_setloc (pattern, before, loc, false,
4620 make_call_insn_raw);
0d682900 4621}
a7102479 4622
e8110d6f
NF
4623/* Like emit_call_insn_before_noloc,
4624 but set INSN_LOCATOR according to BEFORE. */
a7102479
JH
4625rtx
4626emit_call_insn_before (rtx pattern, rtx before)
4627{
e8110d6f
NF
4628 return emit_pattern_before (pattern, before, true, false,
4629 make_call_insn_raw);
a7102479 4630}
b5b8b0ac 4631
e8110d6f 4632/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
b5b8b0ac
AO
4633rtx
4634emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4635{
e8110d6f
NF
4636 return emit_pattern_before_setloc (pattern, before, loc, false,
4637 make_debug_insn_raw);
b5b8b0ac
AO
4638}
4639
e8110d6f
NF
4640/* Like emit_debug_insn_before_noloc,
4641 but set INSN_LOCATOR according to BEFORE. */
b5b8b0ac
AO
4642rtx
4643emit_debug_insn_before (rtx pattern, rtx before)
4644{
e8110d6f
NF
4645 return emit_pattern_before (pattern, before, false, false,
4646 make_debug_insn_raw);
b5b8b0ac 4647}
0d682900 4648\f
2f937369
DM
4649/* Take X and emit it at the end of the doubly-linked
4650 INSN list.
23b2ce53
RS
4651
4652 Returns the last insn emitted. */
4653
4654rtx
502b8322 4655emit_insn (rtx x)
23b2ce53 4656{
5936d944 4657 rtx last = get_last_insn();
2f937369 4658 rtx insn;
23b2ce53 4659
2f937369
DM
4660 if (x == NULL_RTX)
4661 return last;
23b2ce53 4662
2f937369
DM
4663 switch (GET_CODE (x))
4664 {
b5b8b0ac 4665 case DEBUG_INSN:
2f937369
DM
4666 case INSN:
4667 case JUMP_INSN:
4668 case CALL_INSN:
4669 case CODE_LABEL:
4670 case BARRIER:
4671 case NOTE:
4672 insn = x;
4673 while (insn)
23b2ce53 4674 {
2f937369 4675 rtx next = NEXT_INSN (insn);
23b2ce53 4676 add_insn (insn);
2f937369
DM
4677 last = insn;
4678 insn = next;
23b2ce53 4679 }
2f937369 4680 break;
23b2ce53 4681
2f937369
DM
4682#ifdef ENABLE_RTL_CHECKING
4683 case SEQUENCE:
5b0264cb 4684 gcc_unreachable ();
2f937369
DM
4685 break;
4686#endif
23b2ce53 4687
2f937369
DM
4688 default:
4689 last = make_insn_raw (x);
4690 add_insn (last);
4691 break;
23b2ce53
RS
4692 }
4693
4694 return last;
4695}
4696
b5b8b0ac
AO
4697/* Make an insn of code DEBUG_INSN with pattern X
4698 and add it to the end of the doubly-linked list. */
4699
4700rtx
4701emit_debug_insn (rtx x)
4702{
5936d944 4703 rtx last = get_last_insn();
b5b8b0ac
AO
4704 rtx insn;
4705
4706 if (x == NULL_RTX)
4707 return last;
4708
4709 switch (GET_CODE (x))
4710 {
4711 case DEBUG_INSN:
4712 case INSN:
4713 case JUMP_INSN:
4714 case CALL_INSN:
4715 case CODE_LABEL:
4716 case BARRIER:
4717 case NOTE:
4718 insn = x;
4719 while (insn)
4720 {
4721 rtx next = NEXT_INSN (insn);
4722 add_insn (insn);
4723 last = insn;
4724 insn = next;
4725 }
4726 break;
4727
4728#ifdef ENABLE_RTL_CHECKING
4729 case SEQUENCE:
4730 gcc_unreachable ();
4731 break;
4732#endif
4733
4734 default:
4735 last = make_debug_insn_raw (x);
4736 add_insn (last);
4737 break;
4738 }
4739
4740 return last;
4741}
4742
2f937369
DM
4743/* Make an insn of code JUMP_INSN with pattern X
4744 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4745
4746rtx
502b8322 4747emit_jump_insn (rtx x)
23b2ce53 4748{
d950dee3 4749 rtx last = NULL_RTX, insn;
23b2ce53 4750
2f937369 4751 switch (GET_CODE (x))
23b2ce53 4752 {
b5b8b0ac 4753 case DEBUG_INSN:
2f937369
DM
4754 case INSN:
4755 case JUMP_INSN:
4756 case CALL_INSN:
4757 case CODE_LABEL:
4758 case BARRIER:
4759 case NOTE:
4760 insn = x;
4761 while (insn)
4762 {
4763 rtx next = NEXT_INSN (insn);
4764 add_insn (insn);
4765 last = insn;
4766 insn = next;
4767 }
4768 break;
e0a5c5eb 4769
2f937369
DM
4770#ifdef ENABLE_RTL_CHECKING
4771 case SEQUENCE:
5b0264cb 4772 gcc_unreachable ();
2f937369
DM
4773 break;
4774#endif
e0a5c5eb 4775
2f937369
DM
4776 default:
4777 last = make_jump_insn_raw (x);
4778 add_insn (last);
4779 break;
3c030e88 4780 }
e0a5c5eb
RS
4781
4782 return last;
4783}
4784
2f937369 4785/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4786 and add it to the end of the doubly-linked list. */
4787
4788rtx
502b8322 4789emit_call_insn (rtx x)
23b2ce53 4790{
2f937369
DM
4791 rtx insn;
4792
4793 switch (GET_CODE (x))
23b2ce53 4794 {
b5b8b0ac 4795 case DEBUG_INSN:
2f937369
DM
4796 case INSN:
4797 case JUMP_INSN:
4798 case CALL_INSN:
4799 case CODE_LABEL:
4800 case BARRIER:
4801 case NOTE:
4802 insn = emit_insn (x);
4803 break;
23b2ce53 4804
2f937369
DM
4805#ifdef ENABLE_RTL_CHECKING
4806 case SEQUENCE:
5b0264cb 4807 gcc_unreachable ();
2f937369
DM
4808 break;
4809#endif
23b2ce53 4810
2f937369
DM
4811 default:
4812 insn = make_call_insn_raw (x);
23b2ce53 4813 add_insn (insn);
2f937369 4814 break;
23b2ce53 4815 }
2f937369
DM
4816
4817 return insn;
23b2ce53
RS
4818}
4819
4820/* Add the label LABEL to the end of the doubly-linked list. */
4821
4822rtx
502b8322 4823emit_label (rtx label)
23b2ce53
RS
4824{
4825 /* This can be called twice for the same label
4826 as a result of the confusion that follows a syntax error!
4827 So make it harmless. */
4828 if (INSN_UID (label) == 0)
4829 {
4830 INSN_UID (label) = cur_insn_uid++;
4831 add_insn (label);
4832 }
4833 return label;
4834}
4835
4836/* Make an insn of code BARRIER
4837 and add it to the end of the doubly-linked list. */
4838
4839rtx
502b8322 4840emit_barrier (void)
23b2ce53 4841{
b3694847 4842 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4843 INSN_UID (barrier) = cur_insn_uid++;
4844 add_insn (barrier);
4845 return barrier;
4846}
4847
5f2fc772 4848/* Emit a copy of note ORIG. */
502b8322 4849
5f2fc772
NS
4850rtx
4851emit_note_copy (rtx orig)
4852{
4853 rtx note;
b8698a0f 4854
5f2fc772 4855 note = rtx_alloc (NOTE);
b8698a0f 4856
5f2fc772
NS
4857 INSN_UID (note) = cur_insn_uid++;
4858 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4859 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4860 BLOCK_FOR_INSN (note) = NULL;
4861 add_insn (note);
b8698a0f 4862
2e040219 4863 return note;
23b2ce53
RS
4864}
4865
2e040219
NS
4866/* Make an insn of code NOTE or type NOTE_NO
4867 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4868
4869rtx
a38e7aa5 4870emit_note (enum insn_note kind)
23b2ce53 4871{
b3694847 4872 rtx note;
23b2ce53 4873
23b2ce53
RS
4874 note = rtx_alloc (NOTE);
4875 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4876 NOTE_KIND (note) = kind;
dd107e66 4877 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4878 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4879 add_insn (note);
4880 return note;
4881}
4882
c41c1387
RS
4883/* Emit a clobber of lvalue X. */
4884
4885rtx
4886emit_clobber (rtx x)
4887{
4888 /* CONCATs should not appear in the insn stream. */
4889 if (GET_CODE (x) == CONCAT)
4890 {
4891 emit_clobber (XEXP (x, 0));
4892 return emit_clobber (XEXP (x, 1));
4893 }
4894 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4895}
4896
4897/* Return a sequence of insns to clobber lvalue X. */
4898
4899rtx
4900gen_clobber (rtx x)
4901{
4902 rtx seq;
4903
4904 start_sequence ();
4905 emit_clobber (x);
4906 seq = get_insns ();
4907 end_sequence ();
4908 return seq;
4909}
4910
4911/* Emit a use of rvalue X. */
4912
4913rtx
4914emit_use (rtx x)
4915{
4916 /* CONCATs should not appear in the insn stream. */
4917 if (GET_CODE (x) == CONCAT)
4918 {
4919 emit_use (XEXP (x, 0));
4920 return emit_use (XEXP (x, 1));
4921 }
4922 return emit_insn (gen_rtx_USE (VOIDmode, x));
4923}
4924
4925/* Return a sequence of insns to use rvalue X. */
4926
4927rtx
4928gen_use (rtx x)
4929{
4930 rtx seq;
4931
4932 start_sequence ();
4933 emit_use (x);
4934 seq = get_insns ();
4935 end_sequence ();
4936 return seq;
4937}
4938
23b2ce53 4939/* Cause next statement to emit a line note even if the line number
0cea056b 4940 has not changed. */
23b2ce53
RS
4941
4942void
502b8322 4943force_next_line_note (void)
23b2ce53 4944{
6773e15f 4945 last_location = -1;
23b2ce53 4946}
87b47c85
AM
4947
4948/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4949 note of this type already exists, remove it first. */
87b47c85 4950
3d238248 4951rtx
502b8322 4952set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4953{
4954 rtx note = find_reg_note (insn, kind, NULL_RTX);
4955
52488da1
JW
4956 switch (kind)
4957 {
4958 case REG_EQUAL:
4959 case REG_EQUIV:
4960 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4961 has multiple sets (some callers assume single_set
4962 means the insn only has one set, when in fact it
4963 means the insn only has one * useful * set). */
4964 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4965 {
5b0264cb 4966 gcc_assert (!note);
52488da1
JW
4967 return NULL_RTX;
4968 }
4969
4970 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4971 It serves no useful purpose and breaks eliminate_regs. */
4972 if (GET_CODE (datum) == ASM_OPERANDS)
4973 return NULL_RTX;
6fb5fa3c
DB
4974
4975 if (note)
4976 {
4977 XEXP (note, 0) = datum;
4978 df_notes_rescan (insn);
4979 return note;
4980 }
52488da1
JW
4981 break;
4982
4983 default:
6fb5fa3c
DB
4984 if (note)
4985 {
4986 XEXP (note, 0) = datum;
4987 return note;
4988 }
52488da1
JW
4989 break;
4990 }
3d238248 4991
65c5f2a6 4992 add_reg_note (insn, kind, datum);
6fb5fa3c
DB
4993
4994 switch (kind)
3d238248 4995 {
6fb5fa3c
DB
4996 case REG_EQUAL:
4997 case REG_EQUIV:
4998 df_notes_rescan (insn);
4999 break;
5000 default:
5001 break;
3d238248 5002 }
87b47c85 5003
3d238248 5004 return REG_NOTES (insn);
87b47c85 5005}
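
/* An illustrative sketch (TARGET, TEMP and VALUE are hypothetical, and the
   move is assumed to end up as a single insn): record the value an insn
   computes so later passes may substitute the simpler equivalent form.

	rtx insn = emit_move_insn (target, temp);
	set_unique_reg_note (insn, REG_EQUAL, copy_rtx (value));
*/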
7543f918
JR
5006
5007/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5008rtx
5009set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5010{
5011 rtx set = single_set (insn);
5012
5013 if (set && SET_DEST (set) == dst)
5014 return set_unique_reg_note (insn, kind, datum);
5015 return NULL_RTX;
5016}
23b2ce53
RS
5017\f
5018/* Return an indication of which type of insn should have X as a body.
5019 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5020
d78db459 5021static enum rtx_code
502b8322 5022classify_insn (rtx x)
23b2ce53 5023{
4b4bf941 5024 if (LABEL_P (x))
23b2ce53
RS
5025 return CODE_LABEL;
5026 if (GET_CODE (x) == CALL)
5027 return CALL_INSN;
26898771 5028 if (ANY_RETURN_P (x))
23b2ce53
RS
5029 return JUMP_INSN;
5030 if (GET_CODE (x) == SET)
5031 {
5032 if (SET_DEST (x) == pc_rtx)
5033 return JUMP_INSN;
5034 else if (GET_CODE (SET_SRC (x)) == CALL)
5035 return CALL_INSN;
5036 else
5037 return INSN;
5038 }
5039 if (GET_CODE (x) == PARALLEL)
5040 {
b3694847 5041 int j;
23b2ce53
RS
5042 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5043 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5044 return CALL_INSN;
5045 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5046 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5047 return JUMP_INSN;
5048 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5049 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5050 return CALL_INSN;
5051 }
5052 return INSN;
5053}
5054
5055/* Emit the rtl pattern X as an appropriate kind of insn.
5056 If X is a label, it is simply added into the insn chain. */
5057
5058rtx
502b8322 5059emit (rtx x)
23b2ce53
RS
5060{
5061 enum rtx_code code = classify_insn (x);
5062
5b0264cb 5063 switch (code)
23b2ce53 5064 {
5b0264cb
NS
5065 case CODE_LABEL:
5066 return emit_label (x);
5067 case INSN:
5068 return emit_insn (x);
5069 case JUMP_INSN:
5070 {
5071 rtx insn = emit_jump_insn (x);
5072 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5073 return emit_barrier ();
5074 return insn;
5075 }
5076 case CALL_INSN:
5077 return emit_call_insn (x);
b5b8b0ac
AO
5078 case DEBUG_INSN:
5079 return emit_debug_insn (x);
5b0264cb
NS
5080 default:
5081 gcc_unreachable ();
23b2ce53 5082 }
23b2ce53
RS
5083}
5084\f
e2500fed 5085/* Space for free sequence stack entries. */
1431042e 5086static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5087
4dfa0342
RH
5088/* Begin emitting insns to a sequence. If this sequence will contain
5089 something that might cause the compiler to pop arguments to function
5090 calls (because those pops have previously been deferred; see
5091 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5092 before calling this function. That will ensure that the deferred
5093 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5094
5095void
502b8322 5096start_sequence (void)
23b2ce53
RS
5097{
5098 struct sequence_stack *tem;
5099
e2500fed
GK
5100 if (free_sequence_stack != NULL)
5101 {
5102 tem = free_sequence_stack;
5103 free_sequence_stack = tem->next;
5104 }
5105 else
a9429e29 5106 tem = ggc_alloc_sequence_stack ();
23b2ce53 5107
49ad7cfa 5108 tem->next = seq_stack;
5936d944
JH
5109 tem->first = get_insns ();
5110 tem->last = get_last_insn ();
23b2ce53 5111
49ad7cfa 5112 seq_stack = tem;
23b2ce53 5113
5936d944
JH
5114 set_first_insn (0);
5115 set_last_insn (0);
23b2ce53
RS
5116}
5117
5c7a310f
MM
5118/* Set up the insn chain starting with FIRST as the current sequence,
5119 saving the previously current one. See the documentation for
5120 start_sequence for more information about how to use this function. */
23b2ce53
RS
5121
5122void
502b8322 5123push_to_sequence (rtx first)
23b2ce53
RS
5124{
5125 rtx last;
5126
5127 start_sequence ();
5128
e84a58ff
EB
5129 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5130 ;
23b2ce53 5131
5936d944
JH
5132 set_first_insn (first);
5133 set_last_insn (last);
23b2ce53
RS
5134}
5135
bb27eeda
SE
5136/* Like push_to_sequence, but take the last insn as an argument to avoid
5137 looping through the list. */
5138
5139void
5140push_to_sequence2 (rtx first, rtx last)
5141{
5142 start_sequence ();
5143
5936d944
JH
5144 set_first_insn (first);
5145 set_last_insn (last);
bb27eeda
SE
5146}
5147
f15ae3a1
TW
5148/* Set up the outer-level insn chain
5149 as the current sequence, saving the previously current one. */
5150
5151void
502b8322 5152push_topmost_sequence (void)
f15ae3a1 5153{
aefdd5ab 5154 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5155
5156 start_sequence ();
5157
49ad7cfa 5158 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5159 top = stack;
5160
5936d944
JH
5161 set_first_insn (top->first);
5162 set_last_insn (top->last);
f15ae3a1
TW
5163}
5164
5165/* After emitting to the outer-level insn chain, update the outer-level
5166 insn chain, and restore the previous saved state. */
5167
5168void
502b8322 5169pop_topmost_sequence (void)
f15ae3a1 5170{
aefdd5ab 5171 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5172
49ad7cfa 5173 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5174 top = stack;
5175
5936d944
JH
5176 top->first = get_insns ();
5177 top->last = get_last_insn ();
f15ae3a1
TW
5178
5179 end_sequence ();
5180}
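
/* An illustrative sketch of the usual pairing (SEQ is a hypothetical insn
   list built with start_sequence/end_sequence elsewhere): temporarily make
   the function's outer insn chain current, emit near its start, and then
   restore the previous sequence.

	push_topmost_sequence ();
	emit_insn_after (seq, get_insns ());
	pop_topmost_sequence ();
*/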
5181
23b2ce53
RS
5182/* After emitting to a sequence, restore previous saved state.
5183
5c7a310f 5184 To get the contents of the sequence just made, you must call
2f937369 5185 `get_insns' *before* calling here.
5c7a310f
MM
5186
5187 If the compiler might have deferred popping arguments while
5188 generating this sequence, and this sequence will not be immediately
5189 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5190 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5191 pops are inserted into this sequence, and not into some random
5192 location in the instruction stream. See INHIBIT_DEFER_POP for more
5193 information about deferred popping of arguments. */
23b2ce53
RS
5194
5195void
502b8322 5196end_sequence (void)
23b2ce53 5197{
49ad7cfa 5198 struct sequence_stack *tem = seq_stack;
23b2ce53 5199
5936d944
JH
5200 set_first_insn (tem->first);
5201 set_last_insn (tem->last);
49ad7cfa 5202 seq_stack = tem->next;
23b2ce53 5203
e2500fed
GK
5204 memset (tem, 0, sizeof (*tem));
5205 tem->next = free_sequence_stack;
5206 free_sequence_stack = tem;
23b2ce53
RS
5207}
5208
5209/* Return 1 if currently emitting into a sequence. */
5210
5211int
502b8322 5212in_sequence_p (void)
23b2ce53 5213{
49ad7cfa 5214 return seq_stack != 0;
23b2ce53 5215}
23b2ce53 5216\f
59ec66dc
MM
5217/* Put the various virtual registers into REGNO_REG_RTX. */
5218
2bbdec73 5219static void
bd60bab2 5220init_virtual_regs (void)
59ec66dc 5221{
bd60bab2
JH
5222 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5223 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5224 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5225 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5226 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
32990d5b
JJ
5227 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5228 = virtual_preferred_stack_boundary_rtx;
49ad7cfa
BS
5229}
5230
da43a810
BS
5231\f
5232/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5233static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5234static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5235static int copy_insn_n_scratches;
5236
5237/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5238 copied an ASM_OPERANDS.
5239 In that case, it is the original input-operand vector. */
5240static rtvec orig_asm_operands_vector;
5241
5242/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5243 copied an ASM_OPERANDS.
5244 In that case, it is the copied input-operand vector. */
5245static rtvec copy_asm_operands_vector;
5246
5247/* Likewise for the constraints vector. */
5248static rtvec orig_asm_constraints_vector;
5249static rtvec copy_asm_constraints_vector;
5250
5251/* Recursively create a new copy of an rtx for copy_insn.
5252 This function differs from copy_rtx in that it handles SCRATCHes and
5253 ASM_OPERANDs properly.
5254 Normally, this function is not used directly; use copy_insn as front end.
5255 However, you could first copy an insn pattern with copy_insn and then use
5256 this function afterwards to properly copy any REG_NOTEs containing
5257 SCRATCHes. */
5258

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
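
/* Illustrative sketch, not part of the original file: the common pattern is
   to copy an insn's pattern with copy_insn, emit the copy, and then copy any
   EXPR_LIST REG_NOTEs with copy_insn_1 so that SCRATCHes stay consistent
   between the pattern and the notes.  INSN and AFTER are hypothetical
   placeholders; emit_copy_of_insn_after later in this file does the
   complete job.

     rtx new_insn, note;

     new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);
     note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       add_reg_note (new_insn, REG_EQUAL, copy_insn_1 (XEXP (note, 0)));  */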

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
   zero vector when all elements are zero, the one vector when all elements
   are one, and the minus-one vector when all elements are minus one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
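
/* Illustrative sketch, not part of the original file: building an all-zeros
   constant through this interface yields the shared CONST0_RTX for the
   vector mode rather than a fresh CONST_VECTOR.  V4SImode is used only as
   an example and assumes the target supports that mode.

     rtvec v = rtvec_alloc (4);
     int i;

     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v) == CONST0_RTX (V4SImode));  */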

/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the byte, word and double modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                        FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
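
/* Illustrative note, not part of the original file: the const_tiny_rtx
   table filled in above is what the CONST0_RTX, CONST1_RTX and CONSTM1_RTX
   macros in rtl.h index into, so once init_emit_once has run the shared
   integer constants can be reached either way, e.g.:

     gcc_assert (CONST0_RTX (SImode) == const0_rtx);
     gcc_assert (CONST1_RTX (SImode) == const1_rtx);
     gcc_assert (CONSTM1_RTX (SImode) == constm1_rtx);  */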

/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

/* Cached CLOBBER expressions for the hard registers, indexed by mode and
   register number.  */
static GTY((deletable)) rtx hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER of hard register REGNO in mode MODE, creating and
   caching the rtx on first use so that later requests share it.  */
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
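
/* Illustrative sketch, not part of the original file: a backend emitting an
   insn that clobbers a fixed hard register can reuse the cached rtx instead
   of allocating a new CLOBBER each time.  DST, SRC and FLAGS_REGNUM are
   hypothetical placeholders for a target-specific destination, source and
   flags-register number.

     rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (2));

     XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, dst, src);
     XVECEXP (pat, 0, 1) = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);
     emit_insn (pat);  */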

#include "gt-emit-rtl.h"