]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
[multiple changes]
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
09efeca1 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
b6f65e3c 4 Free Software Foundation, Inc.
23b2ce53 5
1322177d 6This file is part of GCC.
23b2ce53 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
23b2ce53 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
23b2ce53
RS
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
23b2ce53
RS
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
f822fcf7
KH
25 This file contains support functions for creating rtl expressions
26 and manipulating them in the doubly-linked chain of insns.
23b2ce53
RS
27
28 The patterns of the insns are created by machine-dependent
29 routines in insn-emit.c, which is generated automatically from
f822fcf7
KH
30 the machine description. These routines make the individual rtx's
31 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
32 which are automatically generated from rtl.def; what is machine
a2a8cc44
KH
33 dependent is the kind of rtx's they make and what arguments they
34 use. */
23b2ce53
RS
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
325217ed 53#include "fixed-value.h"
0dfa1860 54#include "bitmap.h"
a05924f9 55#include "basic-block.h"
87ff9c8e 56#include "ggc.h"
e1772ac0 57#include "debug.h"
d23c55c2 58#include "langhooks.h"
ef330312 59#include "tree-pass.h"
6fb5fa3c 60#include "df.h"
b5b8b0ac 61#include "params.h"
ca695ac9 62
1d445e9e
ILT
63/* Commonly used modes. */
64
0f41302f
MS
65enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
66enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 67enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 68enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 69
bd60bab2
JH
70/* Datastructures maintained for currently processed function in RTL form. */
71
3e029763 72struct rtl_data x_rtl;
bd60bab2
JH
73
74/* Indexed by pseudo register number, gives the rtx for that pseudo.
75 Allocated in parallel with regno_pointer_align.
76 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
77 with length attribute nested in top level structures. */
78
79rtx * regno_reg_rtx;
23b2ce53
RS
80
81/* This is *not* reset after each function. It gives each CODE_LABEL
82 in the entire compilation a unique label number. */
83
044b4de3 84static GTY(()) int label_num = 1;
23b2ce53 85
23b2ce53
RS
86/* Nonzero means do not generate NOTEs for source line numbers. */
87
88static int no_line_numbers;
89
90/* Commonly used rtx's, so that we only need space for one copy.
91 These are initialized once for the entire compilation.
5692c7bc
ZW
92 All of these are unique; no other rtx-object will be equal to any
93 of these. */
23b2ce53 94
5da077de 95rtx global_rtl[GR_MAX];
23b2ce53 96
6cde4876
JL
97/* Commonly used RTL for hard registers. These objects are not necessarily
98 unique, so we allocate them separately from global_rtl. They are
99 initialized once per compilation unit, then copied into regno_reg_rtx
100 at the beginning of each function. */
101static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
102
23b2ce53
RS
103/* We record floating-point CONST_DOUBLEs in each floating-point mode for
104 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
105 record a copy of const[012]_rtx. */
106
107rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
108
68d75312
JC
109rtx const_true_rtx;
110
23b2ce53
RS
111REAL_VALUE_TYPE dconst0;
112REAL_VALUE_TYPE dconst1;
113REAL_VALUE_TYPE dconst2;
114REAL_VALUE_TYPE dconstm1;
03f2ea93 115REAL_VALUE_TYPE dconsthalf;
23b2ce53 116
325217ed
CF
117/* Record fixed-point constant 0 and 1. */
118FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
119FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
120
23b2ce53
RS
121/* All references to the following fixed hard registers go through
122 these unique rtl objects. On machines where the frame-pointer and
123 arg-pointer are the same register, they use the same unique object.
124
125 After register allocation, other rtl objects which used to be pseudo-regs
126 may be clobbered to refer to the frame-pointer register.
127 But references that were originally to the frame-pointer can be
128 distinguished from the others because they contain frame_pointer_rtx.
129
ac6f08b0
DE
130 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
131 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 132 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
133 register elimination hard_frame_pointer_rtx should always be used.
134 On machines where the two registers are same (most) then these are the
135 same.
136
23b2ce53
RS
137 In an inline procedure, the stack and frame pointer rtxs may not be
138 used for anything else. */
23b2ce53
RS
139rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
140rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
141rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
142
a4417a86
JW
143/* This is used to implement __builtin_return_address for some machines.
144 See for instance the MIPS port. */
145rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
146
23b2ce53
RS
147/* We make one copy of (const_int C) where C is in
148 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
149 to save space during the compilation and simplify comparisons of
150 integers. */
151
5da077de 152rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 153
c13e8210
MM
154/* A hash table storing CONST_INTs whose absolute value is greater
155 than MAX_SAVED_CONST_INT. */
156
e2500fed
GK
157static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
158 htab_t const_int_htab;
c13e8210 159
173b24b9 160/* A hash table storing memory attribute structures. */
e2500fed
GK
161static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
162 htab_t mem_attrs_htab;
173b24b9 163
a560d4d4
JH
164/* A hash table storing register attribute structures. */
165static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
166 htab_t reg_attrs_htab;
167
5692c7bc 168/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
169static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
170 htab_t const_double_htab;
5692c7bc 171
091a3ac7
CF
172/* A hash table storing all CONST_FIXEDs. */
173static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
174 htab_t const_fixed_htab;
175
3e029763
JH
176#define first_insn (crtl->emit.x_first_insn)
177#define last_insn (crtl->emit.x_last_insn)
178#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
b5b8b0ac 179#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
3e029763
JH
180#define last_location (crtl->emit.x_last_location)
181#define first_label_num (crtl->emit.x_first_label_num)
23b2ce53 182
502b8322 183static rtx make_call_insn_raw (rtx);
502b8322 184static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
5eb2a9f2 185static void set_used_decls (tree);
502b8322
AJ
186static void mark_label_nuses (rtx);
187static hashval_t const_int_htab_hash (const void *);
188static int const_int_htab_eq (const void *, const void *);
189static hashval_t const_double_htab_hash (const void *);
190static int const_double_htab_eq (const void *, const void *);
191static rtx lookup_const_double (rtx);
091a3ac7
CF
192static hashval_t const_fixed_htab_hash (const void *);
193static int const_fixed_htab_eq (const void *, const void *);
194static rtx lookup_const_fixed (rtx);
502b8322
AJ
195static hashval_t mem_attrs_htab_hash (const void *);
196static int mem_attrs_htab_eq (const void *, const void *);
4862826d 197static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
502b8322
AJ
198 enum machine_mode);
199static hashval_t reg_attrs_htab_hash (const void *);
200static int reg_attrs_htab_eq (const void *, const void *);
201static reg_attrs *get_reg_attrs (tree, int);
a73b091d 202static rtx gen_const_vector (enum machine_mode, int);
32b32b16 203static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 204
6b24c259
JH
205/* Probability of the conditional branch currently proceeded by try_split.
206 Set to -1 otherwise. */
207int split_branch_probability = -1;
ca695ac9 208\f
c13e8210
MM
209/* Returns a hash code for X (which is a really a CONST_INT). */
210
211static hashval_t
502b8322 212const_int_htab_hash (const void *x)
c13e8210 213{
f7d504c2 214 return (hashval_t) INTVAL ((const_rtx) x);
c13e8210
MM
215}
216
cc2902df 217/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
218 CONST_INT) is the same as that given by Y (which is really a
219 HOST_WIDE_INT *). */
220
221static int
502b8322 222const_int_htab_eq (const void *x, const void *y)
c13e8210 223{
f7d504c2 224 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
5692c7bc
ZW
225}
226
227/* Returns a hash code for X (which is really a CONST_DOUBLE). */
228static hashval_t
502b8322 229const_double_htab_hash (const void *x)
5692c7bc 230{
f7d504c2 231 const_rtx const value = (const_rtx) x;
46b33600 232 hashval_t h;
5692c7bc 233
46b33600
RH
234 if (GET_MODE (value) == VOIDmode)
235 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
236 else
fe352c29 237 {
15c812e3 238 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
239 /* MODE is used in the comparison, so it should be in the hash. */
240 h ^= GET_MODE (value);
241 }
5692c7bc
ZW
242 return h;
243}
244
cc2902df 245/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
246 is the same as that represented by Y (really a ...) */
247static int
502b8322 248const_double_htab_eq (const void *x, const void *y)
5692c7bc 249{
f7d504c2 250 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
5692c7bc
ZW
251
252 if (GET_MODE (a) != GET_MODE (b))
253 return 0;
8580f7a0
RH
254 if (GET_MODE (a) == VOIDmode)
255 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
256 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
257 else
258 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
259 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
260}
261
091a3ac7
CF
262/* Returns a hash code for X (which is really a CONST_FIXED). */
263
264static hashval_t
265const_fixed_htab_hash (const void *x)
266{
3101faab 267 const_rtx const value = (const_rtx) x;
091a3ac7
CF
268 hashval_t h;
269
270 h = fixed_hash (CONST_FIXED_VALUE (value));
271 /* MODE is used in the comparison, so it should be in the hash. */
272 h ^= GET_MODE (value);
273 return h;
274}
275
276/* Returns nonzero if the value represented by X (really a ...)
277 is the same as that represented by Y (really a ...). */
278
279static int
280const_fixed_htab_eq (const void *x, const void *y)
281{
3101faab 282 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
091a3ac7
CF
283
284 if (GET_MODE (a) != GET_MODE (b))
285 return 0;
286 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
287}
288
173b24b9
RK
289/* Returns a hash code for X (which is a really a mem_attrs *). */
290
291static hashval_t
502b8322 292mem_attrs_htab_hash (const void *x)
173b24b9 293{
f7d504c2 294 const mem_attrs *const p = (const mem_attrs *) x;
173b24b9
RK
295
296 return (p->alias ^ (p->align * 1000)
297 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
298 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
78b76d08 299 ^ (size_t) iterative_hash_expr (p->expr, 0));
173b24b9
RK
300}
301
cc2902df 302/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
303 mem_attrs *) is the same as that given by Y (which is also really a
304 mem_attrs *). */
c13e8210
MM
305
306static int
502b8322 307mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 308{
741ac903
KG
309 const mem_attrs *const p = (const mem_attrs *) x;
310 const mem_attrs *const q = (const mem_attrs *) y;
173b24b9 311
78b76d08
SB
312 return (p->alias == q->alias && p->offset == q->offset
313 && p->size == q->size && p->align == q->align
314 && (p->expr == q->expr
315 || (p->expr != NULL_TREE && q->expr != NULL_TREE
316 && operand_equal_p (p->expr, q->expr, 0))));
c13e8210
MM
317}
318
173b24b9 319/* Allocate a new mem_attrs structure and insert it into the hash table if
10b76d73
RK
320 one identical to it is not already in the table. We are doing this for
321 MEM of mode MODE. */
173b24b9
RK
322
323static mem_attrs *
4862826d 324get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
502b8322 325 unsigned int align, enum machine_mode mode)
173b24b9
RK
326{
327 mem_attrs attrs;
328 void **slot;
329
bb056a77
OH
330 /* If everything is the default, we can just return zero.
331 This must match what the corresponding MEM_* macros return when the
332 field is not present. */
998d7deb 333 if (alias == 0 && expr == 0 && offset == 0
10b76d73
RK
334 && (size == 0
335 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
bb056a77
OH
336 && (STRICT_ALIGNMENT && mode != BLKmode
337 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
10b76d73
RK
338 return 0;
339
173b24b9 340 attrs.alias = alias;
998d7deb 341 attrs.expr = expr;
173b24b9
RK
342 attrs.offset = offset;
343 attrs.size = size;
344 attrs.align = align;
345
346 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
347 if (*slot == 0)
348 {
349 *slot = ggc_alloc (sizeof (mem_attrs));
350 memcpy (*slot, &attrs, sizeof (mem_attrs));
351 }
352
1b4572a8 353 return (mem_attrs *) *slot;
c13e8210
MM
354}
355
a560d4d4
JH
356/* Returns a hash code for X (which is a really a reg_attrs *). */
357
358static hashval_t
502b8322 359reg_attrs_htab_hash (const void *x)
a560d4d4 360{
741ac903 361 const reg_attrs *const p = (const reg_attrs *) x;
a560d4d4
JH
362
363 return ((p->offset * 1000) ^ (long) p->decl);
364}
365
6356f892 366/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
367 reg_attrs *) is the same as that given by Y (which is also really a
368 reg_attrs *). */
369
370static int
502b8322 371reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4 372{
741ac903
KG
373 const reg_attrs *const p = (const reg_attrs *) x;
374 const reg_attrs *const q = (const reg_attrs *) y;
a560d4d4
JH
375
376 return (p->decl == q->decl && p->offset == q->offset);
377}
378/* Allocate a new reg_attrs structure and insert it into the hash table if
379 one identical to it is not already in the table. We are doing this for
380 MEM of mode MODE. */
381
382static reg_attrs *
502b8322 383get_reg_attrs (tree decl, int offset)
a560d4d4
JH
384{
385 reg_attrs attrs;
386 void **slot;
387
388 /* If everything is the default, we can just return zero. */
389 if (decl == 0 && offset == 0)
390 return 0;
391
392 attrs.decl = decl;
393 attrs.offset = offset;
394
395 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
396 if (*slot == 0)
397 {
398 *slot = ggc_alloc (sizeof (reg_attrs));
399 memcpy (*slot, &attrs, sizeof (reg_attrs));
400 }
401
1b4572a8 402 return (reg_attrs *) *slot;
a560d4d4
JH
403}
404
6fb5fa3c
DB
405
406#if !HAVE_blockage
407/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
408 across this insn. */
409
410rtx
411gen_blockage (void)
412{
413 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
414 MEM_VOLATILE_P (x) = true;
415 return x;
416}
417#endif
418
419
08394eef
BS
420/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
421 don't attempt to share with the various global pieces of rtl (such as
422 frame_pointer_rtx). */
423
424rtx
502b8322 425gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
426{
427 rtx x = gen_rtx_raw_REG (mode, regno);
428 ORIGINAL_REGNO (x) = regno;
429 return x;
430}
431
c5c76735
JL
432/* There are some RTL codes that require special attention; the generation
433 functions do the raw handling. If you add to this list, modify
434 special_rtx in gengenrtl.c as well. */
435
3b80f6ca 436rtx
502b8322 437gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca 438{
c13e8210
MM
439 void **slot;
440
3b80f6ca 441 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 442 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
443
444#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
445 if (const_true_rtx && arg == STORE_FLAG_VALUE)
446 return const_true_rtx;
447#endif
448
c13e8210 449 /* Look up the CONST_INT in the hash table. */
e38992e8
RK
450 slot = htab_find_slot_with_hash (const_int_htab, &arg,
451 (hashval_t) arg, INSERT);
29105cea 452 if (*slot == 0)
1f8f4a0b 453 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210
MM
454
455 return (rtx) *slot;
3b80f6ca
RH
456}
457
2496c7bd 458rtx
502b8322 459gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
460{
461 return GEN_INT (trunc_int_for_mode (c, mode));
462}
463
5692c7bc
ZW
464/* CONST_DOUBLEs might be created from pairs of integers, or from
465 REAL_VALUE_TYPEs. Also, their length is known only at run time,
466 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
467
468/* Determine whether REAL, a CONST_DOUBLE, already exists in the
469 hash table. If so, return its counterpart; otherwise add it
470 to the hash table and return it. */
471static rtx
502b8322 472lookup_const_double (rtx real)
5692c7bc
ZW
473{
474 void **slot = htab_find_slot (const_double_htab, real, INSERT);
475 if (*slot == 0)
476 *slot = real;
477
478 return (rtx) *slot;
479}
29105cea 480
5692c7bc
ZW
481/* Return a CONST_DOUBLE rtx for a floating-point value specified by
482 VALUE in mode MODE. */
0133b7d9 483rtx
502b8322 484const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 485{
5692c7bc
ZW
486 rtx real = rtx_alloc (CONST_DOUBLE);
487 PUT_MODE (real, mode);
488
9e254451 489 real->u.rv = value;
5692c7bc
ZW
490
491 return lookup_const_double (real);
492}
493
091a3ac7
CF
494/* Determine whether FIXED, a CONST_FIXED, already exists in the
495 hash table. If so, return its counterpart; otherwise add it
496 to the hash table and return it. */
497
498static rtx
499lookup_const_fixed (rtx fixed)
500{
501 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
502 if (*slot == 0)
503 *slot = fixed;
504
505 return (rtx) *slot;
506}
507
508/* Return a CONST_FIXED rtx for a fixed-point value specified by
509 VALUE in mode MODE. */
510
511rtx
512const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
513{
514 rtx fixed = rtx_alloc (CONST_FIXED);
515 PUT_MODE (fixed, mode);
516
517 fixed->u.fv = value;
518
519 return lookup_const_fixed (fixed);
520}
521
5692c7bc
ZW
522/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
523 of ints: I0 is the low-order word and I1 is the high-order word.
524 Do not use this routine for non-integer modes; convert to
525 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
526
527rtx
502b8322 528immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
5692c7bc
ZW
529{
530 rtx value;
531 unsigned int i;
532
65acccdd
ZD
533 /* There are the following cases (note that there are no modes with
534 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
535
536 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
537 gen_int_mode.
538 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
539 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
540 from copies of the sign bit, and sign of i0 and i1 are the same), then
541 we return a CONST_INT for i0.
542 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
5692c7bc
ZW
543 if (mode != VOIDmode)
544 {
5b0264cb
NS
545 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
546 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
547 /* We can get a 0 for an error mark. */
548 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
549 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
5692c7bc 550
65acccdd
ZD
551 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
552 return gen_int_mode (i0, mode);
553
554 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
5692c7bc
ZW
555 }
556
557 /* If this integer fits in one word, return a CONST_INT. */
558 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
559 return GEN_INT (i0);
560
561 /* We use VOIDmode for integers. */
562 value = rtx_alloc (CONST_DOUBLE);
563 PUT_MODE (value, VOIDmode);
564
565 CONST_DOUBLE_LOW (value) = i0;
566 CONST_DOUBLE_HIGH (value) = i1;
567
568 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
569 XWINT (value, i) = 0;
570
571 return lookup_const_double (value);
0133b7d9
RH
572}
573
3b80f6ca 574rtx
502b8322 575gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3b80f6ca
RH
576{
577 /* In case the MD file explicitly references the frame pointer, have
578 all such references point to the same frame pointer. This is
579 used during frame pointer elimination to distinguish the explicit
580 references to these registers from pseudos that happened to be
581 assigned to them.
582
583 If we have eliminated the frame pointer or arg pointer, we will
584 be using it as a normal register, for example as a spill
585 register. In such cases, we might be accessing it in a mode that
586 is not Pmode and therefore cannot use the pre-allocated rtx.
587
588 Also don't do this when we are making new REGs in reload, since
589 we don't want to get confused with the real pointers. */
590
591 if (mode == Pmode && !reload_in_progress)
592 {
e10c79fe
LB
593 if (regno == FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
595 return frame_pointer_rtx;
596#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
e10c79fe
LB
597 if (regno == HARD_FRAME_POINTER_REGNUM
598 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
599 return hard_frame_pointer_rtx;
600#endif
601#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
bcb33994 602 if (regno == ARG_POINTER_REGNUM)
3b80f6ca
RH
603 return arg_pointer_rtx;
604#endif
605#ifdef RETURN_ADDRESS_POINTER_REGNUM
bcb33994 606 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3b80f6ca
RH
607 return return_address_pointer_rtx;
608#endif
fc555370 609 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
2d67bd7b 610 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
68252e27 611 return pic_offset_table_rtx;
bcb33994 612 if (regno == STACK_POINTER_REGNUM)
3b80f6ca
RH
613 return stack_pointer_rtx;
614 }
615
006a94b0 616#if 0
6cde4876 617 /* If the per-function register table has been set up, try to re-use
006a94b0
JL
618 an existing entry in that table to avoid useless generation of RTL.
619
620 This code is disabled for now until we can fix the various backends
621 which depend on having non-shared hard registers in some cases. Long
622 term we want to re-enable this code as it can significantly cut down
e10c79fe
LB
623 on the amount of useless RTL that gets generated.
624
625 We'll also need to fix some code that runs after reload that wants to
626 set ORIGINAL_REGNO. */
627
6cde4876
JL
628 if (cfun
629 && cfun->emit
630 && regno_reg_rtx
631 && regno < FIRST_PSEUDO_REGISTER
632 && reg_raw_mode[regno] == mode)
633 return regno_reg_rtx[regno];
006a94b0 634#endif
6cde4876 635
08394eef 636 return gen_raw_REG (mode, regno);
3b80f6ca
RH
637}
638
41472af8 639rtx
502b8322 640gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
641{
642 rtx rt = gen_rtx_raw_MEM (mode, addr);
643
644 /* This field is not cleared by the mere allocation of the rtx, so
645 we clear it here. */
173b24b9 646 MEM_ATTRS (rt) = 0;
41472af8
MM
647
648 return rt;
649}
ddef6bc7 650
542a8afa
RH
651/* Generate a memory referring to non-trapping constant memory. */
652
653rtx
654gen_const_mem (enum machine_mode mode, rtx addr)
655{
656 rtx mem = gen_rtx_MEM (mode, addr);
657 MEM_READONLY_P (mem) = 1;
658 MEM_NOTRAP_P (mem) = 1;
659 return mem;
660}
661
bf877a76
R
662/* Generate a MEM referring to fixed portions of the frame, e.g., register
663 save areas. */
664
665rtx
666gen_frame_mem (enum machine_mode mode, rtx addr)
667{
668 rtx mem = gen_rtx_MEM (mode, addr);
669 MEM_NOTRAP_P (mem) = 1;
670 set_mem_alias_set (mem, get_frame_alias_set ());
671 return mem;
672}
673
674/* Generate a MEM referring to a temporary use of the stack, not part
675 of the fixed stack frame. For example, something which is pushed
676 by a target splitter. */
677rtx
678gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
679{
680 rtx mem = gen_rtx_MEM (mode, addr);
681 MEM_NOTRAP_P (mem) = 1;
e3b5732b 682 if (!cfun->calls_alloca)
bf877a76
R
683 set_mem_alias_set (mem, get_frame_alias_set ());
684 return mem;
685}
686
beb72684
RH
687/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
688 this construct would be valid, and false otherwise. */
689
690bool
691validate_subreg (enum machine_mode omode, enum machine_mode imode,
ed7a4b4b 692 const_rtx reg, unsigned int offset)
ddef6bc7 693{
beb72684
RH
694 unsigned int isize = GET_MODE_SIZE (imode);
695 unsigned int osize = GET_MODE_SIZE (omode);
696
697 /* All subregs must be aligned. */
698 if (offset % osize != 0)
699 return false;
700
701 /* The subreg offset cannot be outside the inner object. */
702 if (offset >= isize)
703 return false;
704
705 /* ??? This should not be here. Temporarily continue to allow word_mode
706 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
707 Generally, backends are doing something sketchy but it'll take time to
708 fix them all. */
709 if (omode == word_mode)
710 ;
711 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
712 is the culprit here, and not the backends. */
713 else if (osize >= UNITS_PER_WORD && isize >= osize)
714 ;
715 /* Allow component subregs of complex and vector. Though given the below
716 extraction rules, it's not always clear what that means. */
717 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
718 && GET_MODE_INNER (imode) == omode)
719 ;
720 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
721 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
722 represent this. It's questionable if this ought to be represented at
723 all -- why can't this all be hidden in post-reload splitters that make
724 arbitrarily mode changes to the registers themselves. */
725 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
726 ;
727 /* Subregs involving floating point modes are not allowed to
728 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
729 (subreg:SI (reg:DF) 0) isn't. */
730 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
731 {
732 if (isize != osize)
733 return false;
734 }
ddef6bc7 735
beb72684
RH
736 /* Paradoxical subregs must have offset zero. */
737 if (osize > isize)
738 return offset == 0;
739
740 /* This is a normal subreg. Verify that the offset is representable. */
741
742 /* For hard registers, we already have most of these rules collected in
743 subreg_offset_representable_p. */
744 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
745 {
746 unsigned int regno = REGNO (reg);
747
748#ifdef CANNOT_CHANGE_MODE_CLASS
749 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
750 && GET_MODE_INNER (imode) == omode)
751 ;
752 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
753 return false;
ddef6bc7 754#endif
beb72684
RH
755
756 return subreg_offset_representable_p (regno, imode, offset, omode);
757 }
758
759 /* For pseudo registers, we want most of the same checks. Namely:
760 If the register no larger than a word, the subreg must be lowpart.
761 If the register is larger than a word, the subreg must be the lowpart
762 of a subword. A subreg does *not* perform arbitrary bit extraction.
763 Given that we've already checked mode/offset alignment, we only have
764 to check subword subregs here. */
765 if (osize < UNITS_PER_WORD)
766 {
767 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
768 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
769 if (offset % UNITS_PER_WORD != low_off)
770 return false;
771 }
772 return true;
773}
774
775rtx
776gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
777{
778 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
5692c7bc 779 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
780}
781
173b24b9
RK
782/* Generate a SUBREG representing the least-significant part of REG if MODE
783 is smaller than mode of REG, otherwise paradoxical SUBREG. */
784
ddef6bc7 785rtx
502b8322 786gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
787{
788 enum machine_mode inmode;
ddef6bc7
JJ
789
790 inmode = GET_MODE (reg);
791 if (inmode == VOIDmode)
792 inmode = mode;
e0e08ac2
JH
793 return gen_rtx_SUBREG (mode, reg,
794 subreg_lowpart_offset (mode, inmode));
ddef6bc7 795}
c5c76735 796\f
23b2ce53 797
80379f51
PB
798/* Create an rtvec and stores within it the RTXen passed in the arguments. */
799
23b2ce53 800rtvec
e34d07f2 801gen_rtvec (int n, ...)
23b2ce53 802{
80379f51
PB
803 int i;
804 rtvec rt_val;
e34d07f2 805 va_list p;
23b2ce53 806
e34d07f2 807 va_start (p, n);
23b2ce53 808
80379f51 809 /* Don't allocate an empty rtvec... */
23b2ce53 810 if (n == 0)
80379f51 811 return NULL_RTVEC;
23b2ce53 812
80379f51 813 rt_val = rtvec_alloc (n);
4f90e4a0 814
23b2ce53 815 for (i = 0; i < n; i++)
80379f51 816 rt_val->elem[i] = va_arg (p, rtx);
6268b922 817
e34d07f2 818 va_end (p);
80379f51 819 return rt_val;
23b2ce53
RS
820}
821
822rtvec
502b8322 823gen_rtvec_v (int n, rtx *argp)
23b2ce53 824{
b3694847
SS
825 int i;
826 rtvec rt_val;
23b2ce53 827
80379f51 828 /* Don't allocate an empty rtvec... */
23b2ce53 829 if (n == 0)
80379f51 830 return NULL_RTVEC;
23b2ce53 831
80379f51 832 rt_val = rtvec_alloc (n);
23b2ce53
RS
833
834 for (i = 0; i < n; i++)
8f985ec4 835 rt_val->elem[i] = *argp++;
23b2ce53
RS
836
837 return rt_val;
838}
839\f
38ae7651
RS
840/* Return the number of bytes between the start of an OUTER_MODE
841 in-memory value and the start of an INNER_MODE in-memory value,
842 given that the former is a lowpart of the latter. It may be a
843 paradoxical lowpart, in which case the offset will be negative
844 on big-endian targets. */
845
846int
847byte_lowpart_offset (enum machine_mode outer_mode,
848 enum machine_mode inner_mode)
849{
850 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
851 return subreg_lowpart_offset (outer_mode, inner_mode);
852 else
853 return -subreg_lowpart_offset (inner_mode, outer_mode);
854}
855\f
23b2ce53
RS
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  /* Pseudos may only be created before register allocation / reload.  */
  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      /* Recursive calls allocate the two halves as independent pseudos.  */
      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      /* Double both per-pseudo tables, zero-filling the new halves.  */
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
920
38ae7651
RS
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  Helper shared by the gen_*_offset functions and
   adjust_reg_mode below.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
930
38ae7651
RS
931/* Generate a register with same attributes as REG, but with OFFSET
932 added to the REG_OFFSET. */
e53a16e7
ILT
933
934rtx
935gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
936 int offset)
937{
60564289 938 rtx new_rtx = gen_rtx_REG (mode, regno);
e53a16e7 939
60564289
KG
940 update_reg_offset (new_rtx, reg, offset);
941 return new_rtx;
e53a16e7
ILT
942}
943
944/* Generate a new pseudo-register with the same attributes as REG, but
38ae7651 945 with OFFSET added to the REG_OFFSET. */
e53a16e7
ILT
946
947rtx
948gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
949{
60564289 950 rtx new_rtx = gen_reg_rtx (mode);
e53a16e7 951
60564289
KG
952 update_reg_offset (new_rtx, reg, offset);
953 return new_rtx;
a560d4d4
JH
954}
955
38ae7651
RS
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  /* Compute the offset while REG still has its old mode; only then
     overwrite the mode.  */
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
965
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      /* Take the expression/offset pair from the MEM's attributes when
	 the offset is a known constant.  */
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
997
998/* Generate a REG rtx for a new pseudo register, copying the mode
999 and attributes from X. */
1000
1001rtx
1002gen_reg_rtx_and_attrs (rtx x)
1003{
1004 rtx reg = gen_reg_rtx (GET_MODE (x));
1005 set_reg_attrs_from_value (reg, x);
1006 return reg;
a560d4d4
JH
1007}
1008
9d18e06b
JZ
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  /* Each element is an (expr_list reg offset) pair; the offset is
	     the byte position of the register within the parameter.  */
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
1032
38ae7651
RS
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  X may be a REG, a lowpart SUBREG of a REG, a CONCAT of two
   registers (complex values), or a PARALLEL of (reg, offset) pairs.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      /* Only lowpart subregs are expected here; look through to the
	 underlying register.  */
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      /* Real part at offset 0, imaginary part one unit further in.  */
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
1075
38ae7651
RS
1076/* Assign the RTX X to declaration T. */
1077
1078void
1079set_decl_rtl (tree t, rtx x)
1080{
1081 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1082 if (x)
1083 set_reg_attrs_for_decl_rtl (t, x);
1084}
1085
5141868d
RS
1086/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1087 if the ABI requires the parameter to be passed by reference. */
38ae7651
RS
1088
1089void
5141868d 1090set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
38ae7651
RS
1091{
1092 DECL_INCOMING_RTL (t) = x;
5141868d 1093 if (x && !by_reference_p)
38ae7651
RS
1094 set_reg_attrs_for_decl_rtl (t, x);
1095}
1096
754fdcca
RK
1097/* Identify REG (which may be a CONCAT) as a user register. */
1098
1099void
502b8322 1100mark_user_reg (rtx reg)
754fdcca
RK
1101{
1102 if (GET_CODE (reg) == CONCAT)
1103 {
1104 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1105 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1106 }
754fdcca 1107 else
5b0264cb
NS
1108 {
1109 gcc_assert (REG_P (reg));
1110 REG_USERVAR_P (reg) = 1;
1111 }
754fdcca
RK
1112}
1113
86fe05e0
RK
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  ALIGN is in bits.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.
       Keep the smaller (weaker) of the two claims.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
1131
1132/* Return 1 plus largest pseudo reg number used in the current function. */
1133
1134int
502b8322 1135max_reg_num (void)
23b2ce53
RS
1136{
1137 return reg_rtx_no;
1138}
1139
1140/* Return 1 + the largest label number used so far in the current function. */
1141
1142int
502b8322 1143max_label_num (void)
23b2ce53 1144{
23b2ce53
RS
1145 return label_num;
1146}
1147
1148/* Return first label number used in this function (if any were used). */
1149
1150int
502b8322 1151get_first_label_num (void)
23b2ce53
RS
1152{
1153 return first_label_num;
1154}
6de9cd9a
DN
1155
1156/* If the rtx for label was created during the expansion of a nested
1157 function, then first_label_num won't include this label number.
fa10beec 1158 Fix this now so that array indices work later. */
6de9cd9a
DN
1159
1160void
1161maybe_set_first_label_num (rtx x)
1162{
1163 if (CODE_LABEL_NUMBER (x) < first_label_num)
1164 first_label_num = CODE_LABEL_NUMBER (x);
1165}
23b2ce53
RS
1166\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    /* VOIDmode constants (e.g. CONST_DOUBLE ints) span two words.  */
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
1240\f
ccba022b 1241rtx
502b8322 1242gen_highpart (enum machine_mode mode, rtx x)
ccba022b 1243{
ddef6bc7 1244 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1245 rtx result;
ddef6bc7 1246
ccba022b
RS
1247 /* This case loses if X is a subreg. To catch bugs early,
1248 complain if an invalid MODE is used even in other cases. */
5b0264cb
NS
1249 gcc_assert (msize <= UNITS_PER_WORD
1250 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
ddef6bc7 1251
e0e08ac2
JH
1252 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1253 subreg_highpart_offset (mode, GET_MODE (x)));
5b0264cb
NS
1254 gcc_assert (result);
1255
09482e0d
JW
1256 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1257 the target if we have a MEM. gen_highpart must return a valid operand,
1258 emitting code if necessary to do so. */
5b0264cb
NS
1259 if (MEM_P (result))
1260 {
1261 result = validize_mem (result);
1262 gcc_assert (result);
1263 }
1264
e0e08ac2
JH
1265 return result;
1266}
5222e470 1267
26d249eb 1268/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1269 be VOIDmode constant. */
1270rtx
502b8322 1271gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1272{
1273 if (GET_MODE (exp) != VOIDmode)
1274 {
5b0264cb 1275 gcc_assert (GET_MODE (exp) == innermode);
5222e470
JH
1276 return gen_highpart (outermode, exp);
1277 }
1278 return simplify_gen_subreg (outermode, exp, innermode,
1279 subreg_highpart_offset (outermode, innermode));
1280}
68252e27 1281
38ae7651 1282/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
8698cce3 1283
e0e08ac2 1284unsigned int
502b8322 1285subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1286{
1287 unsigned int offset = 0;
1288 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1289
e0e08ac2 1290 if (difference > 0)
ccba022b 1291 {
e0e08ac2
JH
1292 if (WORDS_BIG_ENDIAN)
1293 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1294 if (BYTES_BIG_ENDIAN)
1295 offset += difference % UNITS_PER_WORD;
ccba022b 1296 }
ddef6bc7 1297
e0e08ac2 1298 return offset;
ccba022b 1299}
eea50aa0 1300
e0e08ac2
JH
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  /* Unlike the lowpart, a highpart may never be paradoxical.  */
  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      /* Mirror image of subreg_lowpart_offset: on little-endian targets
	 the high part lives at the large byte offsets.  */
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
ccba022b 1321
23b2ce53
RS
1322/* Return 1 iff X, assumed to be a SUBREG,
1323 refers to the least significant part of its containing reg.
1324 If X is not a SUBREG, always return 1 (it is its own low part!). */
1325
1326int
fa233e34 1327subreg_lowpart_p (const_rtx x)
23b2ce53
RS
1328{
1329 if (GET_CODE (x) != SUBREG)
1330 return 1;
a3a03040
RK
1331 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1332 return 0;
23b2ce53 1333
e0e08ac2
JH
1334 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1335 == SUBREG_BYTE (x));
23b2ce53
RS
1336}
1337\f
ddef6bc7
JJ
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  /* After reload we must not create an invalid address; fall
	     through to 0 if the strict check fails.  */
	  if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
	    return 0;
	}
      else
	/* Before reload, replace_equiv_address will legitimize the
	   address for us.  */
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
1401
535a42b1
NS
1402/* Similar to `operand_subword', but never return 0. If we can't
1403 extract the required subword, put OP into a register and try again.
1404 The second attempt must succeed. We always validate the address in
1405 this case.
23b2ce53
RS
1406
1407 MODE is the mode of OP, in case it is CONST_INT. */
1408
1409rtx
502b8322 1410operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1411{
ddef6bc7 1412 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1413
1414 if (result)
1415 return result;
1416
1417 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1418 {
1419 /* If this is a register which can not be accessed by words, copy it
1420 to a pseudo register. */
f8cfc6aa 1421 if (REG_P (op))
77e6b0eb
JC
1422 op = copy_to_reg (op);
1423 else
1424 op = force_reg (mode, op);
1425 }
23b2ce53 1426
ddef6bc7 1427 result = operand_subword (op, offset, 1, mode);
5b0264cb 1428 gcc_assert (result);
23b2ce53
RS
1429
1430 return result;
1431}
1432\f
2b3493c8
AK
1433/* Returns 1 if both MEM_EXPR can be considered equal
1434 and 0 otherwise. */
1435
1436int
4f588890 1437mem_expr_equal_p (const_tree expr1, const_tree expr2)
2b3493c8
AK
1438{
1439 if (expr1 == expr2)
1440 return 1;
1441
1442 if (! expr1 || ! expr2)
1443 return 0;
1444
1445 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1446 return 0;
1447
55b34b5f 1448 return operand_equal_p (expr1, expr2, 0);
2b3493c8
AK
1449}
1450
805903b5
JJ
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      /* Walk outward through nested COMPONENT_REFs, accumulating the
	 byte offset, until we reach a DECL or the field's containing
	 type whose alignment we can check.  */
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      /* NULL first operand: alignment comes from the field's
		 containing record type.  */
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
1534
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
	if (integer_zerop (TREE_OPERAND (t, 1)))
	  /* We don't know anything about the alignment.  */
	  align = BITS_PER_UNIT;
	else
	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
	{
	  /* A weak decl may resolve to a NULL address, so accesses
	     through it may trap.  */
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	{
	  tree base_type = TREE_TYPE (base);
	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
		      || DECL_ARTIFICIAL (base));
	  MEM_READONLY_P (ref) = 1;
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  /* (ioff & -ioff) isolates the lowest set bit: the
		     largest power of two dividing the offset.  */
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && (INDIRECT_REF_P (t2))
		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
	    {
	      expr = t2;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && (INDIRECT_REF_P (t))
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align
	    = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
	 we're overlapping.  */
      offset = NULL;
      expr = NULL;
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
1813
/* Set the memory attributes of REF from tree T, assuming T is the base
   object (OBJECTP nonzero) or a reference into it.  Convenience wrapper
   that forwards to set_mem_attributes_minus_bitpos with a bit position
   of zero.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
1819
173b24b9
RK
1820/* Set the alias set of MEM to SET. */
1821
1822void
4862826d 1823set_mem_alias_set (rtx mem, alias_set_type set)
173b24b9 1824{
68252e27 1825#ifdef ENABLE_CHECKING
173b24b9 1826 /* If the new and old alias sets don't conflict, something is wrong. */
5b0264cb 1827 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
173b24b9
RK
1828#endif
1829
998d7deb 1830 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1831 MEM_SIZE (mem), MEM_ALIGN (mem),
1832 GET_MODE (mem));
173b24b9 1833}
738cc472 1834
d022d93e 1835/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1836
1837void
502b8322 1838set_mem_align (rtx mem, unsigned int align)
738cc472 1839{
998d7deb 1840 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1841 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1842 GET_MODE (mem));
738cc472 1843}
1285011e 1844
998d7deb 1845/* Set the expr for MEM to EXPR. */
1285011e
RK
1846
1847void
502b8322 1848set_mem_expr (rtx mem, tree expr)
1285011e
RK
1849{
1850 MEM_ATTRS (mem)
998d7deb 1851 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1852 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1853}
998d7deb
RH
1854
1855/* Set the offset of MEM to OFFSET. */
1856
1857void
502b8322 1858set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1859{
1860 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1861 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1862 GET_MODE (mem));
35aff10b
AM
1863}
1864
1865/* Set the size of MEM to SIZE. */
1866
1867void
502b8322 1868set_mem_size (rtx mem, rtx size)
35aff10b
AM
1869{
1870 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1871 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1872 GET_MODE (mem));
998d7deb 1873}
173b24b9 1874\f
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.

   If nothing would change (same mode, same address, and the address is
   already valid when validation is requested), MEMREF itself is returned
   unchanged rather than a fresh rtx.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  /* Default the mode and address to those of MEMREF.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  if (validate)
    {
      /* During/after reload no new reloads may be generated, so the
	 address must already be valid; otherwise legitimize it.  */
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_p (mode, addr));
      else
	addr = memory_address (mode, addr);
    }

  /* memory_address may have returned an equal but distinct rtx;
     re-check for "no change" by structural equality.  */
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
792760b9 1910
738cc472
RK
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  /* For BLKmode the size is unknown and only byte alignment is assumed;
     otherwise both follow directly from the mode.  */
  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      /* ... but only if its attributes already match what we would set;
	 otherwise make an unshared copy so we don't clobber MEMREF.  */
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  /* Keep only the alias set; expr and offset are dropped.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);

  return new_rtx;
}
792760b9 1943
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);
  int pbits;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space (sign-extend from Pmode width).  */
  pbits = GET_MODE_BITSIZE (Pmode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new_rtx) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				       memoffset, size, memalign, GET_MODE (new_rtx));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}
2028
630036c6
JJ
2029/* Return a memory reference like MEMREF, but with its mode changed
2030 to MODE and its address changed to ADDR, which is assumed to be
fa10beec 2031 MEMREF offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
2032 nonzero, the memory address is forced to be valid. */
2033
2034rtx
502b8322
AJ
2035adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2036 HOST_WIDE_INT offset, int validate)
630036c6
JJ
2037{
2038 memref = change_address_1 (memref, VOIDmode, addr, validate);
2039 return adjust_address_1 (memref, mode, offset, validate, 0);
2040}
2041
8ac61af7
RK
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);

  new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new_rtx)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Force the whole PIC-based address into a register and retry,
	 rather than letting memory_address rearrange the PIC pattern.  */
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     GET_MODE (new_rtx));
  return new_rtx;
}
68252e27 2083
792760b9
RK
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}
738cc472 2097
f1ec5147
RK
/* Likewise, but the reference is not required to be valid (VALIDATE is
   zero), and no temp-slot bookkeeping is done.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
e7dfe4bb
RH
2105
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.

   The MEM_EXPR is stripped back to an enclosing object large enough to
   contain the widened access; if none can be proven, both the expr and
   the alias set are cleared, since the wider access may alias other
   objects.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new_rtx);
  rtx memoffset = MEM_OFFSET (new_rtx);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);
	  tree offset = component_ref_field_offset (expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  if (! host_integerp (offset, 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Move up to the containing structure, folding this field's
	     byte offset into the running MEM_OFFSET.  */
	  expr = TREE_OPERAND (expr, 0);
	  memoffset
	    = (GEN_INT (INTVAL (memoffset)
			+ tree_low_cst (offset, 1)
			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				       MEM_ALIGN (new_rtx), mode);

  return new_rtx;
}
23b2ce53 2188\f
f6129d66
RH
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

/* Return the singleton spill-slot decl, building it on first use when
   FORCE_BUILD_P; otherwise return whatever has been built so far
   (possibly NULL).  */

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  TREE_THIS_NOTRAP (d) = 1;
  spill_slot_decl = d;

  /* Give the decl a BLKmode MEM based at the frame pointer, with a
     fresh alias set so spill slots don't alias other memory.  */
  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
				  NULL_RTX, 0, BLKmode);
  SET_DECL_RTL (d, rd);

  return d;
}
2217
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  alias_set_type alias;
  rtx addr, offset;
  tree expr;

  expr = get_spill_slot_decl (true);
  alias = MEM_ALIAS_SET (DECL_RTL (expr));

  /* We expect the incoming memory to be of the form:
     (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  offset = const0_rtx;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    offset = XEXP (addr, 1);

  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
  MEM_NOTRAP_P (mem) = 1;
}
2248\f
23b2ce53
RS
/* Return a newly created CODE_LABEL rtx with a unique label number
   (taken from, and incrementing, the global label_num counter).  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
2257\f
2258/* For procedure integration. */
2259
/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.

   When debug insns may be present, debug and non-debug UIDs are
   tracked separately: debug insns live below MIN_NONDEBUG_INSN_UID.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      /* Scan the chain, keeping the maximum UID seen in each range.  */
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
23b2ce53 2301\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
2320
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  /* Clear the used flags on the insn stream so copy_rtx_if_shared
     starts from a clean state.  */
  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}
2349
/* Pass entry point: unshare the whole current insn stream.
   Always returns 0 (no extra TODO flags).  */

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
2356
/* Pass descriptor for the RTL unsharing pass, run by the pass manager;
   verifies sharing afterwards via TODO_verify_rtl_sharing.  */
struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};
2375
2376
2c07f13b
JH
/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
   Recursively does the same for subexpressions.  INSN is used only for
   diagnostics when invalid sharing is found.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      return;
      /* SCRATCH must be shared because they represent distinct values.  */
    case CLOBBER:
      /* Clobbers of hard registers may be shared.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
2483
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  /* First pass: clear all used flags, including inside SEQUENCEs
     (e.g. delay slots), so the marking pass starts clean.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (GET_CODE (PATTERN (p)) == SEQUENCE)
	  {
	    int i;
	    rtx q, sequence = PATTERN (p);

	    for (i = 0; i < XVECLEN (sequence, 0); i++)
	      {
		q = XVECEXP (sequence, 0, i);
		gcc_assert (INSN_P (q));
		reset_used_flags (PATTERN (q));
		reset_used_flags (REG_NOTES (q));
	      }
	  }
      }

  /* Second pass: mark every subexpression, erroring on any rtx that is
     reached twice but may not be shared.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	verify_rtx_sharing (PATTERN (p), p);
	verify_rtx_sharing (REG_NOTES (p), p);
      }
}
2519
d1b81779
GK
2520/* Go through all the RTL insn bodies and copy any invalid shared structure.
2521 Assumes the mark bits are cleared at entry. */
2522
2c07f13b
JH
2523void
2524unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2525{
2526 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2527 if (INSN_P (insn))
d1b81779
GK
2528 {
2529 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2530 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
d1b81779
GK
2531 }
2532}
2533
2d4aecb3 2534/* Go through all virtual stack slots of a function and mark them as
5eb2a9f2
RS
2535 shared. We never replace the DECL_RTLs themselves with a copy,
2536 but expressions mentioned into a DECL_RTL cannot be shared with
2537 expressions in the instruction stream.
2538
2539 Note that reload may convert pseudo registers into memories in-place.
2540 Pseudo registers are always shared, but MEMs never are. Thus if we
2541 reset the used flags on MEMs in the instruction stream, we must set
2542 them again on MEMs that appear in DECL_RTLs. */
2543
2d4aecb3 2544static void
5eb2a9f2 2545set_used_decls (tree blk)
2d4aecb3
AO
2546{
2547 tree t;
2548
2549 /* Mark decls. */
2550 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c 2551 if (DECL_RTL_SET_P (t))
5eb2a9f2 2552 set_used_flags (DECL_RTL (t));
2d4aecb3
AO
2553
2554 /* Now process sub-blocks. */
87caf699 2555 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
5eb2a9f2 2556 set_used_decls (t);
2d4aecb3
AO
2557}
2558
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
2569
ff954f39
AP
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.

   The recursion on the last subexpression is converted to iteration by
   deferring each 'e'/'E' operand one step (LAST_PTR) and looping via
   "goto repeat", which bounds stack depth on long operand chains.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Clobbers of hard registers may be shared.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Process the previously deferred operand; defer this one.  */
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  /* Handle the one still-deferred operand iteratively.  */
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
2692
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  The recursion on the last 'e' operand
   is turned into iteration via "goto repeat".  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Tail-call the last operand iteratively.  */
	  if (i == length-1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  reset_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}
2c07f13b
JH
2766
2767/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2768 to look for shared sub-parts. */
2769
void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* Mark this rtx as seen; copy_rtx_if_shared will copy it when it is
     encountered again.  NOTE(review): unlike reset_used_flags this
     recurses on every 'e' operand (no goto-based tail iteration).  */
  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  set_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    set_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}
23b2ce53
RS
2830\f
2831/* Copy X if necessary so that it won't be altered by changes in OTHER.
2832 Return X or the rtx for the pseudo reg the value of X was copied into.
2833 OTHER must be valid as a SET_DEST. */
2834
rtx
make_safe_from (rtx x, rtx other)
{
  /* Strip wrappers to find the register or memory actually stored into.  */
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  /* Copy X into a fresh pseudo if a store to OTHER could clobber it:
     either OTHER is a MEM and X is something other than a constant,
     REG or SUBREG, or OTHER is a hard register / appears within X.  */
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
2867\f
2868/* Emission of insns (adding them to the doubly-linked list). */
2869
2870/* Return the first insn of the current sequence or current function. */
2871
rtx
get_insns (void)
{
  /* Head of the current insn chain (sequence or function).  */
  return first_insn;
}
2877
3dec4024
JH
2878/* Specify a new insn as the first in the chain. */
2879
void
set_first_insn (rtx insn)
{
  /* INSN must really be a chain head: nothing may precede it.  */
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}
2886
23b2ce53
RS
2887/* Return the last insn emitted in current sequence or current function. */
2888
rtx
get_last_insn (void)
{
  /* Tail of the current insn chain (sequence or function).  */
  return last_insn;
}
2894
2895/* Specify a new insn as the last in the chain. */
2896
void
set_last_insn (rtx insn)
{
  /* INSN must really be a chain tail: nothing may follow it.  */
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}
2903
2904/* Return the last insn emitted, even if it is in a sequence now pushed. */
2905
rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  /* Prefer the innermost (current) sequence's tail.  */
  if (last_insn)
    return last_insn;
  /* Otherwise walk outward through the pushed sequences for the first
     one that has emitted anything.  */
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
2917
2a496e8b
JDA
2918/* Return the first nonnote insn emitted in current sequence or current
2919 function. This routine looks inside SEQUENCEs. */
2920
rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      /* If the chain starts with notes, step forward (next_insn already
	 descends into SEQUENCEs) until a non-note or the end.  */
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  /* First insn is not a note: look inside a leading SEQUENCE.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0, 0);
	}
    }

  return insn;
}
2943
2944/* Return the last nonnote insn emitted in current sequence or current
2945 function. This routine looks inside SEQUENCEs. */
2946
rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      /* If the chain ends with notes, step backward (previous_insn
	 already descends into SEQUENCEs) until a non-note or the start.  */
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  /* Last insn is not a note: take the final element of a
	     trailing SEQUENCE.  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}
2970
23b2ce53
RS
2971/* Return a number larger than any instruction's uid in this function. */
2972
int
get_max_uid (void)
{
  /* cur_insn_uid is the next uid to be handed out, so it is strictly
     greater than every uid already assigned.  */
  return cur_insn_uid;
}
b5b8b0ac
AO
2978
2979/* Return the number of actual (non-debug) insns emitted in this
2980 function. */
2981
int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  Subtract whichever of the
     debug-uid counter or its floor is larger.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
2998
23b2ce53
RS
2999\f
3000/* Return the next insn. If it is a SEQUENCE, return the first insn
3001 of the sequence. */
3002
3003rtx
502b8322 3004next_insn (rtx insn)
23b2ce53 3005{
75547801
KG
3006 if (insn)
3007 {
3008 insn = NEXT_INSN (insn);
3009 if (insn && NONJUMP_INSN_P (insn)
3010 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3011 insn = XVECEXP (PATTERN (insn), 0, 0);
3012 }
23b2ce53 3013
75547801 3014 return insn;
23b2ce53
RS
3015}
3016
3017/* Return the previous insn. If it is a SEQUENCE, return the last insn
3018 of the sequence. */
3019
3020rtx
502b8322 3021previous_insn (rtx insn)
23b2ce53 3022{
75547801
KG
3023 if (insn)
3024 {
3025 insn = PREV_INSN (insn);
3026 if (insn && NONJUMP_INSN_P (insn)
3027 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3028 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3029 }
23b2ce53 3030
75547801 3031 return insn;
23b2ce53
RS
3032}
3033
3034/* Return the next insn after INSN that is not a NOTE. This routine does not
3035 look inside SEQUENCEs. */
3036
3037rtx
502b8322 3038next_nonnote_insn (rtx insn)
23b2ce53 3039{
75547801
KG
3040 while (insn)
3041 {
3042 insn = NEXT_INSN (insn);
3043 if (insn == 0 || !NOTE_P (insn))
3044 break;
3045 }
23b2ce53 3046
75547801 3047 return insn;
23b2ce53
RS
3048}
3049
1e211590
DD
3050/* Return the next insn after INSN that is not a NOTE, but stop the
3051 search before we enter another basic block. This routine does not
3052 look inside SEQUENCEs. */
3053
3054rtx
3055next_nonnote_insn_bb (rtx insn)
3056{
3057 while (insn)
3058 {
3059 insn = NEXT_INSN (insn);
3060 if (insn == 0 || !NOTE_P (insn))
3061 break;
3062 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3063 return NULL_RTX;
3064 }
3065
3066 return insn;
3067}
3068
23b2ce53
RS
3069/* Return the previous insn before INSN that is not a NOTE. This routine does
3070 not look inside SEQUENCEs. */
3071
3072rtx
502b8322 3073prev_nonnote_insn (rtx insn)
23b2ce53 3074{
75547801
KG
3075 while (insn)
3076 {
3077 insn = PREV_INSN (insn);
3078 if (insn == 0 || !NOTE_P (insn))
3079 break;
3080 }
23b2ce53 3081
75547801 3082 return insn;
23b2ce53
RS
3083}
3084
b5b8b0ac
AO
3085/* Return the next insn after INSN that is not a DEBUG_INSN. This
3086 routine does not look inside SEQUENCEs. */
3087
3088rtx
3089next_nondebug_insn (rtx insn)
3090{
3091 while (insn)
3092 {
3093 insn = NEXT_INSN (insn);
3094 if (insn == 0 || !DEBUG_INSN_P (insn))
3095 break;
3096 }
3097
3098 return insn;
3099}
3100
3101/* Return the previous insn before INSN that is not a DEBUG_INSN.
3102 This routine does not look inside SEQUENCEs. */
3103
3104rtx
3105prev_nondebug_insn (rtx insn)
3106{
3107 while (insn)
3108 {
3109 insn = PREV_INSN (insn);
3110 if (insn == 0 || !DEBUG_INSN_P (insn))
3111 break;
3112 }
3113
3114 return insn;
3115}
3116
23b2ce53
RS
3117/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3118 or 0, if there is none. This routine does not look inside
0f41302f 3119 SEQUENCEs. */
23b2ce53
RS
3120
3121rtx
502b8322 3122next_real_insn (rtx insn)
23b2ce53 3123{
75547801
KG
3124 while (insn)
3125 {
3126 insn = NEXT_INSN (insn);
3127 if (insn == 0 || INSN_P (insn))
3128 break;
3129 }
23b2ce53 3130
75547801 3131 return insn;
23b2ce53
RS
3132}
3133
3134/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3135 or 0, if there is none. This routine does not look inside
3136 SEQUENCEs. */
3137
3138rtx
502b8322 3139prev_real_insn (rtx insn)
23b2ce53 3140{
75547801
KG
3141 while (insn)
3142 {
3143 insn = PREV_INSN (insn);
3144 if (insn == 0 || INSN_P (insn))
3145 break;
3146 }
23b2ce53 3147
75547801 3148 return insn;
23b2ce53
RS
3149}
3150
ee960939
OH
3151/* Return the last CALL_INSN in the current list, or 0 if there is none.
3152 This routine does not look inside SEQUENCEs. */
3153
3154rtx
502b8322 3155last_call_insn (void)
ee960939
OH
3156{
3157 rtx insn;
3158
3159 for (insn = get_last_insn ();
4b4bf941 3160 insn && !CALL_P (insn);
ee960939
OH
3161 insn = PREV_INSN (insn))
3162 ;
3163
3164 return insn;
3165}
3166
23b2ce53
RS
3167/* Find the next insn after INSN that really does something. This routine
3168 does not look inside SEQUENCEs. Until reload has completed, this is the
3169 same as next_real_insn. */
3170
69732dcb 3171int
4f588890 3172active_insn_p (const_rtx insn)
69732dcb 3173{
4b4bf941
JQ
3174 return (CALL_P (insn) || JUMP_P (insn)
3175 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
3176 && (! reload_completed
3177 || (GET_CODE (PATTERN (insn)) != USE
3178 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3179}
3180
23b2ce53 3181rtx
502b8322 3182next_active_insn (rtx insn)
23b2ce53 3183{
75547801
KG
3184 while (insn)
3185 {
3186 insn = NEXT_INSN (insn);
3187 if (insn == 0 || active_insn_p (insn))
3188 break;
3189 }
23b2ce53 3190
75547801 3191 return insn;
23b2ce53
RS
3192}
3193
3194/* Find the last insn before INSN that really does something. This routine
3195 does not look inside SEQUENCEs. Until reload has completed, this is the
3196 same as prev_real_insn. */
3197
3198rtx
502b8322 3199prev_active_insn (rtx insn)
23b2ce53 3200{
75547801
KG
3201 while (insn)
3202 {
3203 insn = PREV_INSN (insn);
3204 if (insn == 0 || active_insn_p (insn))
3205 break;
3206 }
23b2ce53 3207
75547801 3208 return insn;
23b2ce53
RS
3209}
3210
3211/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3212
3213rtx
502b8322 3214next_label (rtx insn)
23b2ce53 3215{
75547801
KG
3216 while (insn)
3217 {
3218 insn = NEXT_INSN (insn);
3219 if (insn == 0 || LABEL_P (insn))
3220 break;
3221 }
23b2ce53 3222
75547801 3223 return insn;
23b2ce53
RS
3224}
3225
3226/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3227
3228rtx
502b8322 3229prev_label (rtx insn)
23b2ce53 3230{
75547801
KG
3231 while (insn)
3232 {
3233 insn = PREV_INSN (insn);
3234 if (insn == 0 || LABEL_P (insn))
3235 break;
3236 }
23b2ce53 3237
75547801 3238 return insn;
23b2ce53 3239}
6c2511d3
RS
3240
3241/* Return the last label to mark the same position as LABEL. Return null
3242 if LABEL itself is null. */
3243
3244rtx
3245skip_consecutive_labels (rtx label)
3246{
3247 rtx insn;
3248
3249 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3250 if (LABEL_P (insn))
3251 label = insn;
3252
3253 return label;
3254}
23b2ce53
RS
3255\f
3256#ifdef HAVE_cc0
c572e5ba
JVA
3257/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3258 and REG_CC_USER notes so we can find it. */
3259
void
link_cc0_insns (rtx insn)
{
  /* The consumer of CC0 is the next real insn; if it was packed into a
     delay-slot SEQUENCE, the real user is the sequence's first member.
     NOTE(review): assumes a CC0 user always follows INSN — USER is
     dereferenced without a null check; confirm callers guarantee this.  */
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  /* Cross-link setter and user so they can be found after scheduling.  */
  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}
3271
23b2ce53
RS
3272/* Return the next insn that uses CC0 after INSN, which is assumed to
3273 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3274 applied to the result of this function should yield INSN).
3275
3276 Normally, this is simply the next insn. However, if a REG_CC_USER note
3277 is present, it contains the insn that uses CC0.
3278
3279 Return 0 if we can't find the insn. */
3280
rtx
next_cc0_user (rtx insn)
{
  /* An explicit REG_CC_USER note (added by link_cc0_insns) overrides
     the positional rule.  */
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  /* Otherwise the user is the next real insn, descending into a
     delay-slot SEQUENCE if necessary.  */
  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Only accept it if it really mentions cc0.  */
  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}
3298
3299/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3300 note, it is the previous insn. */
3301
rtx
prev_cc0_setter (rtx insn)
{
  /* An explicit REG_CC_SETTER note overrides the positional rule.  */
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  /* Otherwise the setter must be the immediately preceding real insn;
     assert that it indeed sets cc0.  */
  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
3315#endif
e5bef2e4 3316
594f8779
RZ
3317#ifdef AUTO_INC_DEC
3318/* Find a RTX_AUTOINC class rtx which matches DATA. */
3319
static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  /* DATA is the register the caller is interested in.  */
  rtx reg = (rtx) data;

  /* Keep traversing (for_each_rtx convention: 0 = continue).  */
  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* 1 = found: this autoinc modifies REG.  */
      if (rtx_equal_p (reg, XEXP (x, 0)))
	return 1;
      break;

    default:
      /* Every RTX_AUTOINC code is listed above.  */
      gcc_unreachable ();
    }
  /* -1 = autoinc of some other register; skip its subexpressions.  */
  return -1;
}
3346#endif
3347
e5bef2e4
HB
3348/* Increment the label uses for all labels present in rtx. */
3349
static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  /* Bump the use count of a directly referenced CODE_LABEL.  */
  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  /* Recurse over all rtx ('e') and rtx-vector ('E') operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
3371
23b2ce53
RS
3372\f
3373/* Try splitting insns that can be split for better scheduling.
3374 PAT is the pattern which might split.
3375 TRIAL is the insn providing PAT.
cc2902df 3376 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3377
3378 If this routine succeeds in splitting, it returns the first or last
11147ebe 3379 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3380 returns TRIAL. If the insn to be returned can be split, it will be. */
3381
rtx
try_split (rtx pat, rtx trial, int last)
{
  /* Anchors around TRIAL so the emitted replacement range can be found
     again after TRIAL itself is deleted.  */
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  /* Export TRIAL's branch probability to the splitters via the global
     split_branch_probability, then reset it afterwards.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    /* Append TRIAL's usage list after any usage the new call
	       already carries.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  /* EH region notes go on every insn that can throw.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn)
		  || (flag_non_call_exceptions && INSN_P (insn)
		      && may_trap_p (PATTERN (insn))))
		add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  /* Call-related notes go on every call in the new sequence.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* Jump-related notes go on every jump in the new sequence.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  /* Re-attach REG_INC to whichever new insn actually contains
	     the auto-increment of the noted register.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  /* Splice the new sequence into the chain where TRIAL was, carrying
     over TRIAL's source location, then drop TRIAL.  */
  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can be occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
3555\f
3556/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3557 Store PATTERN in the pattern slots. */
23b2ce53
RS
3558
rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  /* Fresh uid; code -1 means "not yet recognized".  */
  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Catch callers that should have used emit_jump_insn: a plain INSN
     whose pattern branches.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
3587
b5b8b0ac
AO
3588/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3589
rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  /* Debug insns draw uids from their own counter; once that counter
     overflows its reserved range, fall back to the normal one.  */
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
3608
2f937369 3609/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3610
rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  /* Target label is filled in later by jump analysis.  */
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
aff507f4 3628
2f937369 3629/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3630
static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  /* Register-usage list is attached later (see add_function_usage_to).  */
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
23b2ce53
RS
3648\f
3649/* Add INSN to the end of the doubly-linked list.
3650 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3651
void
add_insn (rtx insn)
{
  /* Link INSN in as the new chain tail.  */
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  /* First emission into an empty chain also sets the head.  */
  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}
3666
a0ae8e8d
RK
3667/* Add INSN into the doubly-linked list after insn AFTER. This and
3668 the next should be the only functions called to insert an insn once
ba213285 3669 delay slots have been filled since only they know how to update a
a0ae8e8d 3670 SEQUENCE. */
23b2ce53
RS
3671
void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      /* Keep a following delay-slot SEQUENCE's first member's PREV
	 pointer consistent with the outer chain.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      /* AFTER ends some pushed (pending) sequence; update its tail.  */
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Maintain basic-block info when AFTER lives inside a block.  */
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  /* Mirror the link change inside a preceding SEQUENCE's last member.  */
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
3727
a0ae8e8d 3728/* Add INSN into the doubly-linked list before insn BEFORE. This and
6fb5fa3c
DB
3729 the previous should be the only functions called to insert an insn
3730 once delay slots have been filled since only they know how to
3731 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3732 bb from before. */
a0ae8e8d
RK
3733
void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      /* Keep a preceding delay-slot SEQUENCE's last member's NEXT
	 pointer consistent with the outer chain.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      /* BEFORE starts some pushed (pending) sequence; update its head.  */
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  /* Infer the basic block from BEFORE when the caller passed none.  */
  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  /* Mirror the link change inside a following SEQUENCE's first member.  */
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
3791
6fb5fa3c
DB
3792
3793/* Replace insn with an deleted instruction note. */
3794
0ce2b299
EB
void
set_insn_deleted (rtx insn)
{
  /* Drop dataflow info first, then turn the insn into an inert
     NOTE_INSN_DELETED in place (the chain links are untouched).  */
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
3802
3803
89e99eea
DB
3804/* Remove an insn from its doubly-linked list. This function knows how
3805 to handle sequences. */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      /* Keep a preceding SEQUENCE's internal links consistent.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      /* INSN headed some pushed (pending) sequence; fix its head.  */
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      /* Keep a following SEQUENCE's internal links consistent.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      /* INSN ended some pushed (pending) sequence; fix its tail.  */
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      gcc_assert (stack);
    }
  /* Repair basic-block boundaries if INSN was a block head or end.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
	df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
3878
ee960939
OH
3879/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3880
3881void
502b8322 3882add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939 3883{
5b0264cb 3884 gcc_assert (call_insn && CALL_P (call_insn));
ee960939
OH
3885
3886 /* Put the register usage information on the CALL. If there is already
3887 some usage information, put ours at the end. */
3888 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3889 {
3890 rtx link;
3891
3892 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3893 link = XEXP (link, 1))
3894 ;
3895
3896 XEXP (link, 1) = call_fusage;
3897 }
3898 else
3899 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3900}
3901
23b2ce53
RS
3902/* Delete all insns made since FROM.
3903 FROM becomes the new last instruction. */
3904
3905void
502b8322 3906delete_insns_since (rtx from)
23b2ce53
RS
3907{
3908 if (from == 0)
3909 first_insn = 0;
3910 else
3911 NEXT_INSN (from) = 0;
3912 last_insn = from;
3913}
3914
5dab5552
MS
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  /* Keep the function-wide first/last insn pointers valid if the
     moved range touched either end of the chain.  */
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
3948
3c030e88
JH
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  /* Save FROM's predecessor before the chain is respliced below.  */
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  /* Barriers live between blocks and carry no block info.  */
  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      /* If the insns came from another block, its end pointer may
	 have been TO; retreat it to the insn left in place.  */
      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      /* If the insns landed at the end of BB, TO is the new end.  */
      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Re-home every moved insn into BB.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
3980
23b2ce53 3981\f
2f937369
DM
3982/* Emit insn(s) of given code and pattern
3983 at a specified place within the doubly-linked list.
23b2ce53 3984
2f937369
DM
3985 All of the emit_foo global entry points accept an object
3986 X which is either an insn list or a PATTERN of a single
3987 instruction.
23b2ce53 3988
2f937369
DM
3989 There are thus a few canonical ways to generate code and
3990 emit it at a specific place in the instruction stream. For
3991 example, consider the instruction named SPOT and the fact that
3992 we would like to emit some instructions before SPOT. We might
3993 do it like this:
23b2ce53 3994
2f937369
DM
3995 start_sequence ();
3996 ... emit the new instructions ...
3997 insns_head = get_insns ();
3998 end_sequence ();
23b2ce53 3999
2f937369 4000 emit_insn_before (insns_head, SPOT);
23b2ce53 4001
2f937369
DM
4002 It used to be common to generate SEQUENCE rtl instead, but that
4003 is a relic of the past which no longer occurs. The reason is that
4004 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4005 generated would almost certainly die right after it was created. */
23b2ce53 4006
2f937369 4007/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
4008
4009rtx
6fb5fa3c 4010emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
23b2ce53 4011{
2f937369 4012 rtx last = before;
b3694847 4013 rtx insn;
23b2ce53 4014
5b0264cb 4015 gcc_assert (before);
2f937369
DM
4016
4017 if (x == NULL_RTX)
4018 return last;
4019
4020 switch (GET_CODE (x))
23b2ce53 4021 {
b5b8b0ac 4022 case DEBUG_INSN:
2f937369
DM
4023 case INSN:
4024 case JUMP_INSN:
4025 case CALL_INSN:
4026 case CODE_LABEL:
4027 case BARRIER:
4028 case NOTE:
4029 insn = x;
4030 while (insn)
4031 {
4032 rtx next = NEXT_INSN (insn);
6fb5fa3c 4033 add_insn_before (insn, before, bb);
2f937369
DM
4034 last = insn;
4035 insn = next;
4036 }
4037 break;
4038
4039#ifdef ENABLE_RTL_CHECKING
4040 case SEQUENCE:
5b0264cb 4041 gcc_unreachable ();
2f937369
DM
4042 break;
4043#endif
4044
4045 default:
4046 last = make_insn_raw (x);
6fb5fa3c 4047 add_insn_before (last, before, bb);
2f937369 4048 break;
23b2ce53
RS
4049 }
4050
2f937369 4051 return last;
23b2ce53
RS
4052}
4053
2f937369 4054/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4055 and output it before the instruction BEFORE. */
4056
4057rtx
a7102479 4058emit_jump_insn_before_noloc (rtx x, rtx before)
23b2ce53 4059{
d950dee3 4060 rtx insn, last = NULL_RTX;
aff507f4 4061
5b0264cb 4062 gcc_assert (before);
2f937369
DM
4063
4064 switch (GET_CODE (x))
aff507f4 4065 {
b5b8b0ac 4066 case DEBUG_INSN:
2f937369
DM
4067 case INSN:
4068 case JUMP_INSN:
4069 case CALL_INSN:
4070 case CODE_LABEL:
4071 case BARRIER:
4072 case NOTE:
4073 insn = x;
4074 while (insn)
4075 {
4076 rtx next = NEXT_INSN (insn);
6fb5fa3c 4077 add_insn_before (insn, before, NULL);
2f937369
DM
4078 last = insn;
4079 insn = next;
4080 }
4081 break;
4082
4083#ifdef ENABLE_RTL_CHECKING
4084 case SEQUENCE:
5b0264cb 4085 gcc_unreachable ();
2f937369
DM
4086 break;
4087#endif
4088
4089 default:
4090 last = make_jump_insn_raw (x);
6fb5fa3c 4091 add_insn_before (last, before, NULL);
2f937369 4092 break;
aff507f4
RK
4093 }
4094
2f937369 4095 return last;
23b2ce53
RS
4096}
4097
2f937369 4098/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4099 and output it before the instruction BEFORE. */
4100
4101rtx
a7102479 4102emit_call_insn_before_noloc (rtx x, rtx before)
969d70ca 4103{
d950dee3 4104 rtx last = NULL_RTX, insn;
969d70ca 4105
5b0264cb 4106 gcc_assert (before);
2f937369
DM
4107
4108 switch (GET_CODE (x))
969d70ca 4109 {
b5b8b0ac 4110 case DEBUG_INSN:
2f937369
DM
4111 case INSN:
4112 case JUMP_INSN:
4113 case CALL_INSN:
4114 case CODE_LABEL:
4115 case BARRIER:
4116 case NOTE:
4117 insn = x;
4118 while (insn)
4119 {
4120 rtx next = NEXT_INSN (insn);
6fb5fa3c 4121 add_insn_before (insn, before, NULL);
2f937369
DM
4122 last = insn;
4123 insn = next;
4124 }
4125 break;
4126
4127#ifdef ENABLE_RTL_CHECKING
4128 case SEQUENCE:
5b0264cb 4129 gcc_unreachable ();
2f937369
DM
4130 break;
4131#endif
4132
4133 default:
4134 last = make_call_insn_raw (x);
6fb5fa3c 4135 add_insn_before (last, before, NULL);
2f937369 4136 break;
969d70ca
JH
4137 }
4138
2f937369 4139 return last;
969d70ca
JH
4140}
4141
b5b8b0ac
AO
4142/* Make an instruction with body X and code DEBUG_INSN
4143 and output it before the instruction BEFORE. */
4144
4145rtx
4146emit_debug_insn_before_noloc (rtx x, rtx before)
4147{
4148 rtx last = NULL_RTX, insn;
4149
4150 gcc_assert (before);
4151
4152 switch (GET_CODE (x))
4153 {
4154 case DEBUG_INSN:
4155 case INSN:
4156 case JUMP_INSN:
4157 case CALL_INSN:
4158 case CODE_LABEL:
4159 case BARRIER:
4160 case NOTE:
4161 insn = x;
4162 while (insn)
4163 {
4164 rtx next = NEXT_INSN (insn);
4165 add_insn_before (insn, before, NULL);
4166 last = insn;
4167 insn = next;
4168 }
4169 break;
4170
4171#ifdef ENABLE_RTL_CHECKING
4172 case SEQUENCE:
4173 gcc_unreachable ();
4174 break;
4175#endif
4176
4177 default:
4178 last = make_debug_insn_raw (x);
4179 add_insn_before (last, before, NULL);
4180 break;
4181 }
4182
4183 return last;
4184}
4185
23b2ce53 4186/* Make an insn of code BARRIER
e881bb1b 4187 and output it before the insn BEFORE. */
23b2ce53
RS
4188
4189rtx
502b8322 4190emit_barrier_before (rtx before)
23b2ce53 4191{
b3694847 4192 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4193
4194 INSN_UID (insn) = cur_insn_uid++;
4195
6fb5fa3c 4196 add_insn_before (insn, before, NULL);
23b2ce53
RS
4197 return insn;
4198}
4199
e881bb1b
RH
4200/* Emit the label LABEL before the insn BEFORE. */
4201
4202rtx
502b8322 4203emit_label_before (rtx label, rtx before)
e881bb1b
RH
4204{
4205 /* This can be called twice for the same label as a result of the
4206 confusion that follows a syntax error! So make it harmless. */
4207 if (INSN_UID (label) == 0)
4208 {
4209 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4210 add_insn_before (label, before, NULL);
e881bb1b
RH
4211 }
4212
4213 return label;
4214}
4215
23b2ce53
RS
4216/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4217
4218rtx
a38e7aa5 4219emit_note_before (enum insn_note subtype, rtx before)
23b2ce53 4220{
b3694847 4221 rtx note = rtx_alloc (NOTE);
23b2ce53 4222 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4223 NOTE_KIND (note) = subtype;
ba4f7968 4224 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4225 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
23b2ce53 4226
6fb5fa3c 4227 add_insn_before (note, before, NULL);
23b2ce53
RS
4228 return note;
4229}
4230\f
2f937369
DM
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  Links the chain starting at FIRST in after AFTER,
   assigning the insns to BB (or AFTER's block when BB is null).
   Returns the last insn of the list.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  /* Infer the block from AFTER when none was given; barriers live
     outside basic blocks and cannot supply one.  */
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      /* Assign every non-barrier insn of the list to BB.  The loop
	 condition stops on the final insn, which is therefore
	 handled separately just below.  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      /* If AFTER ended BB, the list's last insn now does.  */
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block bookkeeping needed, but we still must find LAST.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  /* Splice the list into the chain between AFTER and its old
     successor.  */
  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;

  return last;
}
4276
6fb5fa3c
DB
4277/* Make X be output after the insn AFTER and set the BB of insn. If
4278 BB is NULL, an attempt is made to infer the BB from AFTER. */
2f937369
DM
4279
4280rtx
6fb5fa3c 4281emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
2f937369
DM
4282{
4283 rtx last = after;
4284
5b0264cb 4285 gcc_assert (after);
2f937369
DM
4286
4287 if (x == NULL_RTX)
4288 return last;
4289
4290 switch (GET_CODE (x))
23b2ce53 4291 {
b5b8b0ac 4292 case DEBUG_INSN:
2f937369
DM
4293 case INSN:
4294 case JUMP_INSN:
4295 case CALL_INSN:
4296 case CODE_LABEL:
4297 case BARRIER:
4298 case NOTE:
6fb5fa3c 4299 last = emit_insn_after_1 (x, after, bb);
2f937369
DM
4300 break;
4301
4302#ifdef ENABLE_RTL_CHECKING
4303 case SEQUENCE:
5b0264cb 4304 gcc_unreachable ();
2f937369
DM
4305 break;
4306#endif
4307
4308 default:
4309 last = make_insn_raw (x);
6fb5fa3c 4310 add_insn_after (last, after, bb);
2f937369 4311 break;
23b2ce53
RS
4312 }
4313
2f937369 4314 return last;
23b2ce53
RS
4315}
4316
255680cf 4317
2f937369 4318/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4319 and output it after the insn AFTER. */
4320
4321rtx
a7102479 4322emit_jump_insn_after_noloc (rtx x, rtx after)
23b2ce53 4323{
2f937369 4324 rtx last;
23b2ce53 4325
5b0264cb 4326 gcc_assert (after);
2f937369
DM
4327
4328 switch (GET_CODE (x))
23b2ce53 4329 {
b5b8b0ac 4330 case DEBUG_INSN:
2f937369
DM
4331 case INSN:
4332 case JUMP_INSN:
4333 case CALL_INSN:
4334 case CODE_LABEL:
4335 case BARRIER:
4336 case NOTE:
6fb5fa3c 4337 last = emit_insn_after_1 (x, after, NULL);
2f937369
DM
4338 break;
4339
4340#ifdef ENABLE_RTL_CHECKING
4341 case SEQUENCE:
5b0264cb 4342 gcc_unreachable ();
2f937369
DM
4343 break;
4344#endif
4345
4346 default:
4347 last = make_jump_insn_raw (x);
6fb5fa3c 4348 add_insn_after (last, after, NULL);
2f937369 4349 break;
23b2ce53
RS
4350 }
4351
2f937369
DM
4352 return last;
4353}
4354
4355/* Make an instruction with body X and code CALL_INSN
4356 and output it after the instruction AFTER. */
4357
4358rtx
a7102479 4359emit_call_insn_after_noloc (rtx x, rtx after)
2f937369
DM
4360{
4361 rtx last;
4362
5b0264cb 4363 gcc_assert (after);
2f937369
DM
4364
4365 switch (GET_CODE (x))
4366 {
b5b8b0ac 4367 case DEBUG_INSN:
2f937369
DM
4368 case INSN:
4369 case JUMP_INSN:
4370 case CALL_INSN:
4371 case CODE_LABEL:
4372 case BARRIER:
4373 case NOTE:
6fb5fa3c 4374 last = emit_insn_after_1 (x, after, NULL);
2f937369
DM
4375 break;
4376
4377#ifdef ENABLE_RTL_CHECKING
4378 case SEQUENCE:
5b0264cb 4379 gcc_unreachable ();
2f937369
DM
4380 break;
4381#endif
4382
4383 default:
4384 last = make_call_insn_raw (x);
6fb5fa3c 4385 add_insn_after (last, after, NULL);
2f937369
DM
4386 break;
4387 }
4388
4389 return last;
23b2ce53
RS
4390}
4391
b5b8b0ac
AO
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.
   Returns the last insn emitted.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a (chain of) insns; splice the whole list in.  */
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a DEBUG_INSN first.  */
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}
4428
23b2ce53
RS
4429/* Make an insn of code BARRIER
4430 and output it after the insn AFTER. */
4431
4432rtx
502b8322 4433emit_barrier_after (rtx after)
23b2ce53 4434{
b3694847 4435 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4436
4437 INSN_UID (insn) = cur_insn_uid++;
4438
6fb5fa3c 4439 add_insn_after (insn, after, NULL);
23b2ce53
RS
4440 return insn;
4441}
4442
4443/* Emit the label LABEL after the insn AFTER. */
4444
4445rtx
502b8322 4446emit_label_after (rtx label, rtx after)
23b2ce53
RS
4447{
4448 /* This can be called twice for the same label
4449 as a result of the confusion that follows a syntax error!
4450 So make it harmless. */
4451 if (INSN_UID (label) == 0)
4452 {
4453 INSN_UID (label) = cur_insn_uid++;
6fb5fa3c 4454 add_insn_after (label, after, NULL);
23b2ce53
RS
4455 }
4456
4457 return label;
4458}
4459
4460/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4461
4462rtx
a38e7aa5 4463emit_note_after (enum insn_note subtype, rtx after)
23b2ce53 4464{
b3694847 4465 rtx note = rtx_alloc (NOTE);
23b2ce53 4466 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4467 NOTE_KIND (note) = subtype;
ba4f7968 4468 BLOCK_FOR_INSN (note) = NULL;
9dbe7947 4469 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
6fb5fa3c 4470 add_insn_after (note, after, NULL);
23b2ce53
RS
4471 return note;
4472}
23b2ce53 4473\f
a7102479 4474/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4475rtx
502b8322 4476emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4477{
6fb5fa3c 4478 rtx last = emit_insn_after_noloc (pattern, after, NULL);
0d682900 4479
a7102479 4480 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4481 return last;
4482
2f937369
DM
4483 after = NEXT_INSN (after);
4484 while (1)
4485 {
a7102479 4486 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4487 INSN_LOCATOR (after) = loc;
2f937369
DM
4488 if (after == last)
4489 break;
4490 after = NEXT_INSN (after);
4491 }
0d682900
JH
4492 return last;
4493}
4494
a7102479
JH
4495/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4496rtx
4497emit_insn_after (rtx pattern, rtx after)
4498{
b5b8b0ac
AO
4499 rtx prev = after;
4500
4501 while (DEBUG_INSN_P (prev))
4502 prev = PREV_INSN (prev);
4503
4504 if (INSN_P (prev))
4505 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
a7102479 4506 else
6fb5fa3c 4507 return emit_insn_after_noloc (pattern, after, NULL);
a7102479
JH
4508}
4509
4510/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4511rtx
502b8322 4512emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4513{
a7102479 4514 rtx last = emit_jump_insn_after_noloc (pattern, after);
2f937369 4515
a7102479 4516 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4517 return last;
4518
2f937369
DM
4519 after = NEXT_INSN (after);
4520 while (1)
4521 {
a7102479 4522 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4523 INSN_LOCATOR (after) = loc;
2f937369
DM
4524 if (after == last)
4525 break;
4526 after = NEXT_INSN (after);
4527 }
0d682900
JH
4528 return last;
4529}
4530
a7102479
JH
4531/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4532rtx
4533emit_jump_insn_after (rtx pattern, rtx after)
4534{
b5b8b0ac
AO
4535 rtx prev = after;
4536
4537 while (DEBUG_INSN_P (prev))
4538 prev = PREV_INSN (prev);
4539
4540 if (INSN_P (prev))
4541 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
a7102479
JH
4542 else
4543 return emit_jump_insn_after_noloc (pattern, after);
4544}
4545
4546/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4547rtx
502b8322 4548emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900 4549{
a7102479 4550 rtx last = emit_call_insn_after_noloc (pattern, after);
2f937369 4551
a7102479 4552 if (pattern == NULL_RTX || !loc)
dd3adcf8
DJ
4553 return last;
4554
2f937369
DM
4555 after = NEXT_INSN (after);
4556 while (1)
4557 {
a7102479 4558 if (active_insn_p (after) && !INSN_LOCATOR (after))
0435312e 4559 INSN_LOCATOR (after) = loc;
2f937369
DM
4560 if (after == last)
4561 break;
4562 after = NEXT_INSN (after);
4563 }
0d682900
JH
4564 return last;
4565}
4566
a7102479
JH
4567/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4568rtx
4569emit_call_insn_after (rtx pattern, rtx after)
4570{
b5b8b0ac
AO
4571 rtx prev = after;
4572
4573 while (DEBUG_INSN_P (prev))
4574 prev = PREV_INSN (prev);
4575
4576 if (INSN_P (prev))
4577 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
a7102479
JH
4578 else
4579 return emit_call_insn_after_noloc (pattern, after);
4580}
4581
b5b8b0ac
AO
4582/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4583rtx
4584emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4585{
4586 rtx last = emit_debug_insn_after_noloc (pattern, after);
4587
4588 if (pattern == NULL_RTX || !loc)
4589 return last;
4590
4591 after = NEXT_INSN (after);
4592 while (1)
4593 {
4594 if (active_insn_p (after) && !INSN_LOCATOR (after))
4595 INSN_LOCATOR (after) = loc;
4596 if (after == last)
4597 break;
4598 after = NEXT_INSN (after);
4599 }
4600 return last;
4601}
4602
4603/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4604rtx
4605emit_debug_insn_after (rtx pattern, rtx after)
4606{
4607 if (INSN_P (after))
4608 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4609 else
4610 return emit_debug_insn_after_noloc (pattern, after);
4611}
4612
a7102479 4613/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
0d682900 4614rtx
502b8322 4615emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0d682900
JH
4616{
4617 rtx first = PREV_INSN (before);
6fb5fa3c 4618 rtx last = emit_insn_before_noloc (pattern, before, NULL);
a7102479
JH
4619
4620 if (pattern == NULL_RTX || !loc)
4621 return last;
4622
26cb3993
JH
4623 if (!first)
4624 first = get_insns ();
4625 else
4626 first = NEXT_INSN (first);
a7102479
JH
4627 while (1)
4628 {
4629 if (active_insn_p (first) && !INSN_LOCATOR (first))
4630 INSN_LOCATOR (first) = loc;
4631 if (first == last)
4632 break;
4633 first = NEXT_INSN (first);
4634 }
4635 return last;
4636}
4637
4638/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4639rtx
4640emit_insn_before (rtx pattern, rtx before)
4641{
b5b8b0ac
AO
4642 rtx next = before;
4643
4644 while (DEBUG_INSN_P (next))
4645 next = PREV_INSN (next);
4646
4647 if (INSN_P (next))
4648 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
a7102479 4649 else
6fb5fa3c 4650 return emit_insn_before_noloc (pattern, before, NULL);
a7102479
JH
4651}
4652
4653/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4654rtx
4655emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4656{
4657 rtx first = PREV_INSN (before);
4658 rtx last = emit_jump_insn_before_noloc (pattern, before);
4659
4660 if (pattern == NULL_RTX)
4661 return last;
4662
4663 first = NEXT_INSN (first);
4664 while (1)
4665 {
4666 if (active_insn_p (first) && !INSN_LOCATOR (first))
4667 INSN_LOCATOR (first) = loc;
4668 if (first == last)
4669 break;
4670 first = NEXT_INSN (first);
4671 }
4672 return last;
4673}
4674
4675/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4676rtx
4677emit_jump_insn_before (rtx pattern, rtx before)
4678{
b5b8b0ac
AO
4679 rtx next = before;
4680
4681 while (DEBUG_INSN_P (next))
4682 next = PREV_INSN (next);
4683
4684 if (INSN_P (next))
4685 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
a7102479
JH
4686 else
4687 return emit_jump_insn_before_noloc (pattern, before);
4688}
4689
4690/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4691rtx
4692emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4693{
4694 rtx first = PREV_INSN (before);
4695 rtx last = emit_call_insn_before_noloc (pattern, before);
0d682900 4696
dd3adcf8
DJ
4697 if (pattern == NULL_RTX)
4698 return last;
4699
2f937369
DM
4700 first = NEXT_INSN (first);
4701 while (1)
4702 {
a7102479 4703 if (active_insn_p (first) && !INSN_LOCATOR (first))
0435312e 4704 INSN_LOCATOR (first) = loc;
2f937369
DM
4705 if (first == last)
4706 break;
4707 first = NEXT_INSN (first);
4708 }
0d682900
JH
4709 return last;
4710}
a7102479
JH
4711
4712/* like emit_call_insn_before_noloc,
4713 but set insn_locator according to before. */
4714rtx
4715emit_call_insn_before (rtx pattern, rtx before)
4716{
b5b8b0ac
AO
4717 rtx next = before;
4718
4719 while (DEBUG_INSN_P (next))
4720 next = PREV_INSN (next);
4721
4722 if (INSN_P (next))
4723 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
a7102479
JH
4724 else
4725 return emit_call_insn_before_noloc (pattern, before);
4726}
b5b8b0ac
AO
4727
4728/* like emit_insn_before_noloc, but set insn_locator according to scope. */
4729rtx
4730emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4731{
4732 rtx first = PREV_INSN (before);
4733 rtx last = emit_debug_insn_before_noloc (pattern, before);
4734
4735 if (pattern == NULL_RTX)
4736 return last;
4737
4738 first = NEXT_INSN (first);
4739 while (1)
4740 {
4741 if (active_insn_p (first) && !INSN_LOCATOR (first))
4742 INSN_LOCATOR (first) = loc;
4743 if (first == last)
4744 break;
4745 first = NEXT_INSN (first);
4746 }
4747 return last;
4748}
4749
4750/* like emit_debug_insn_before_noloc,
4751 but set insn_locator according to before. */
4752rtx
4753emit_debug_insn_before (rtx pattern, rtx before)
4754{
4755 if (INSN_P (before))
4756 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4757 else
4758 return emit_debug_insn_before_noloc (pattern, before);
4759}
0d682900 4760\f
2f937369
DM
4761/* Take X and emit it at the end of the doubly-linked
4762 INSN list.
23b2ce53
RS
4763
4764 Returns the last insn emitted. */
4765
4766rtx
502b8322 4767emit_insn (rtx x)
23b2ce53 4768{
2f937369
DM
4769 rtx last = last_insn;
4770 rtx insn;
23b2ce53 4771
2f937369
DM
4772 if (x == NULL_RTX)
4773 return last;
23b2ce53 4774
2f937369
DM
4775 switch (GET_CODE (x))
4776 {
b5b8b0ac 4777 case DEBUG_INSN:
2f937369
DM
4778 case INSN:
4779 case JUMP_INSN:
4780 case CALL_INSN:
4781 case CODE_LABEL:
4782 case BARRIER:
4783 case NOTE:
4784 insn = x;
4785 while (insn)
23b2ce53 4786 {
2f937369 4787 rtx next = NEXT_INSN (insn);
23b2ce53 4788 add_insn (insn);
2f937369
DM
4789 last = insn;
4790 insn = next;
23b2ce53 4791 }
2f937369 4792 break;
23b2ce53 4793
2f937369
DM
4794#ifdef ENABLE_RTL_CHECKING
4795 case SEQUENCE:
5b0264cb 4796 gcc_unreachable ();
2f937369
DM
4797 break;
4798#endif
23b2ce53 4799
2f937369
DM
4800 default:
4801 last = make_insn_raw (x);
4802 add_insn (last);
4803 break;
23b2ce53
RS
4804 }
4805
4806 return last;
4807}
4808
b5b8b0ac
AO
4809/* Make an insn of code DEBUG_INSN with pattern X
4810 and add it to the end of the doubly-linked list. */
4811
4812rtx
4813emit_debug_insn (rtx x)
4814{
4815 rtx last = last_insn;
4816 rtx insn;
4817
4818 if (x == NULL_RTX)
4819 return last;
4820
4821 switch (GET_CODE (x))
4822 {
4823 case DEBUG_INSN:
4824 case INSN:
4825 case JUMP_INSN:
4826 case CALL_INSN:
4827 case CODE_LABEL:
4828 case BARRIER:
4829 case NOTE:
4830 insn = x;
4831 while (insn)
4832 {
4833 rtx next = NEXT_INSN (insn);
4834 add_insn (insn);
4835 last = insn;
4836 insn = next;
4837 }
4838 break;
4839
4840#ifdef ENABLE_RTL_CHECKING
4841 case SEQUENCE:
4842 gcc_unreachable ();
4843 break;
4844#endif
4845
4846 default:
4847 last = make_debug_insn_raw (x);
4848 add_insn (last);
4849 break;
4850 }
4851
4852 return last;
4853}
4854
2f937369
DM
4855/* Make an insn of code JUMP_INSN with pattern X
4856 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4857
4858rtx
502b8322 4859emit_jump_insn (rtx x)
23b2ce53 4860{
d950dee3 4861 rtx last = NULL_RTX, insn;
23b2ce53 4862
2f937369 4863 switch (GET_CODE (x))
23b2ce53 4864 {
b5b8b0ac 4865 case DEBUG_INSN:
2f937369
DM
4866 case INSN:
4867 case JUMP_INSN:
4868 case CALL_INSN:
4869 case CODE_LABEL:
4870 case BARRIER:
4871 case NOTE:
4872 insn = x;
4873 while (insn)
4874 {
4875 rtx next = NEXT_INSN (insn);
4876 add_insn (insn);
4877 last = insn;
4878 insn = next;
4879 }
4880 break;
e0a5c5eb 4881
2f937369
DM
4882#ifdef ENABLE_RTL_CHECKING
4883 case SEQUENCE:
5b0264cb 4884 gcc_unreachable ();
2f937369
DM
4885 break;
4886#endif
e0a5c5eb 4887
2f937369
DM
4888 default:
4889 last = make_jump_insn_raw (x);
4890 add_insn (last);
4891 break;
3c030e88 4892 }
e0a5c5eb
RS
4893
4894 return last;
4895}
4896
2f937369 4897/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4898 and add it to the end of the doubly-linked list. */
4899
4900rtx
502b8322 4901emit_call_insn (rtx x)
23b2ce53 4902{
2f937369
DM
4903 rtx insn;
4904
4905 switch (GET_CODE (x))
23b2ce53 4906 {
b5b8b0ac 4907 case DEBUG_INSN:
2f937369
DM
4908 case INSN:
4909 case JUMP_INSN:
4910 case CALL_INSN:
4911 case CODE_LABEL:
4912 case BARRIER:
4913 case NOTE:
4914 insn = emit_insn (x);
4915 break;
23b2ce53 4916
2f937369
DM
4917#ifdef ENABLE_RTL_CHECKING
4918 case SEQUENCE:
5b0264cb 4919 gcc_unreachable ();
2f937369
DM
4920 break;
4921#endif
23b2ce53 4922
2f937369
DM
4923 default:
4924 insn = make_call_insn_raw (x);
23b2ce53 4925 add_insn (insn);
2f937369 4926 break;
23b2ce53 4927 }
2f937369
DM
4928
4929 return insn;
23b2ce53
RS
4930}
4931
4932/* Add the label LABEL to the end of the doubly-linked list. */
4933
4934rtx
502b8322 4935emit_label (rtx label)
23b2ce53
RS
4936{
4937 /* This can be called twice for the same label
4938 as a result of the confusion that follows a syntax error!
4939 So make it harmless. */
4940 if (INSN_UID (label) == 0)
4941 {
4942 INSN_UID (label) = cur_insn_uid++;
4943 add_insn (label);
4944 }
4945 return label;
4946}
4947
4948/* Make an insn of code BARRIER
4949 and add it to the end of the doubly-linked list. */
4950
4951rtx
502b8322 4952emit_barrier (void)
23b2ce53 4953{
b3694847 4954 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4955 INSN_UID (barrier) = cur_insn_uid++;
4956 add_insn (barrier);
4957 return barrier;
4958}
4959
5f2fc772 4960/* Emit a copy of note ORIG. */
502b8322 4961
5f2fc772
NS
4962rtx
4963emit_note_copy (rtx orig)
4964{
4965 rtx note;
4966
5f2fc772
NS
4967 note = rtx_alloc (NOTE);
4968
4969 INSN_UID (note) = cur_insn_uid++;
4970 NOTE_DATA (note) = NOTE_DATA (orig);
a38e7aa5 4971 NOTE_KIND (note) = NOTE_KIND (orig);
5f2fc772
NS
4972 BLOCK_FOR_INSN (note) = NULL;
4973 add_insn (note);
4974
2e040219 4975 return note;
23b2ce53
RS
4976}
4977
2e040219
NS
4978/* Make an insn of code NOTE or type NOTE_NO
4979 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4980
4981rtx
a38e7aa5 4982emit_note (enum insn_note kind)
23b2ce53 4983{
b3694847 4984 rtx note;
23b2ce53 4985
23b2ce53
RS
4986 note = rtx_alloc (NOTE);
4987 INSN_UID (note) = cur_insn_uid++;
a38e7aa5 4988 NOTE_KIND (note) = kind;
dd107e66 4989 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4990 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4991 add_insn (note);
4992 return note;
4993}
4994
c41c1387
RS
4995/* Emit a clobber of lvalue X. */
4996
4997rtx
4998emit_clobber (rtx x)
4999{
5000 /* CONCATs should not appear in the insn stream. */
5001 if (GET_CODE (x) == CONCAT)
5002 {
5003 emit_clobber (XEXP (x, 0));
5004 return emit_clobber (XEXP (x, 1));
5005 }
5006 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5007}
5008
/* Return a sequence of insns to clobber lvalue X.  The insns are
   built in a temporary sequence and not emitted into the current
   insn stream.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
5022
5023/* Emit a use of rvalue X. */
5024
5025rtx
5026emit_use (rtx x)
5027{
5028 /* CONCATs should not appear in the insn stream. */
5029 if (GET_CODE (x) == CONCAT)
5030 {
5031 emit_use (XEXP (x, 0));
5032 return emit_use (XEXP (x, 1));
5033 }
5034 return emit_insn (gen_rtx_USE (VOIDmode, x));
5035}
5036
/* Return a sequence of insns to use rvalue X.  The insns are built
   in a temporary sequence and not emitted into the current insn
   stream.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
5050
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  /* Invalidate the cached location so the next comparison against
     it always fails, forcing a fresh line note.  */
  last_location = -1;
}
87b47c85
AM
5059
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  Returns the
   (possibly reused) note, or NULL_RTX when no note may be added.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  gcc_assert (!note);
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Reuse an existing note in place; equivalence info changed,
	 so let the dataflow machinery rescan the insn's notes.  */
      if (note)
	{
	  XEXP (note, 0) = datum;
	  df_notes_rescan (insn);
	  return note;
	}
      break;

    default:
      /* For other kinds, just overwrite an existing note's datum.  */
      if (note)
	{
	  XEXP (note, 0) = datum;
	  return note;
	}
      break;
    }

  /* No existing note of this kind: attach a fresh one.  */
  add_reg_note (insn, kind, datum);

  /* A new equivalence note also requires a dataflow rescan.  */
  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
23b2ce53
RS
5118\f
5119/* Return an indication of which type of insn should have X as a body.
5120 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5121
d78db459 5122static enum rtx_code
502b8322 5123classify_insn (rtx x)
23b2ce53 5124{
4b4bf941 5125 if (LABEL_P (x))
23b2ce53
RS
5126 return CODE_LABEL;
5127 if (GET_CODE (x) == CALL)
5128 return CALL_INSN;
5129 if (GET_CODE (x) == RETURN)
5130 return JUMP_INSN;
5131 if (GET_CODE (x) == SET)
5132 {
5133 if (SET_DEST (x) == pc_rtx)
5134 return JUMP_INSN;
5135 else if (GET_CODE (SET_SRC (x)) == CALL)
5136 return CALL_INSN;
5137 else
5138 return INSN;
5139 }
5140 if (GET_CODE (x) == PARALLEL)
5141 {
b3694847 5142 int j;
23b2ce53
RS
5143 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5144 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5145 return CALL_INSN;
5146 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5147 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5148 return JUMP_INSN;
5149 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5150 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5151 return CALL_INSN;
5152 }
5153 return INSN;
5154}
5155
5156/* Emit the rtl pattern X as an appropriate kind of insn.
5157 If X is a label, it is simply added into the insn chain. */
5158
5159rtx
502b8322 5160emit (rtx x)
23b2ce53
RS
5161{
5162 enum rtx_code code = classify_insn (x);
5163
5b0264cb 5164 switch (code)
23b2ce53 5165 {
5b0264cb
NS
5166 case CODE_LABEL:
5167 return emit_label (x);
5168 case INSN:
5169 return emit_insn (x);
5170 case JUMP_INSN:
5171 {
5172 rtx insn = emit_jump_insn (x);
5173 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5174 return emit_barrier ();
5175 return insn;
5176 }
5177 case CALL_INSN:
5178 return emit_call_insn (x);
b5b8b0ac
AO
5179 case DEBUG_INSN:
5180 return emit_debug_insn (x);
5b0264cb
NS
5181 default:
5182 gcc_unreachable ();
23b2ce53 5183 }
23b2ce53
RS
5184}
5185\f
e2500fed 5186/* Space for free sequence stack entries. */
1431042e 5187static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 5188
4dfa0342
RH
5189/* Begin emitting insns to a sequence. If this sequence will contain
5190 something that might cause the compiler to pop arguments to function
5191 calls (because those pops have previously been deferred; see
5192 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5193 before calling this function. That will ensure that the deferred
5194 pops are not accidentally emitted in the middle of this sequence. */
23b2ce53
RS
5195
5196void
502b8322 5197start_sequence (void)
23b2ce53
RS
5198{
5199 struct sequence_stack *tem;
5200
e2500fed
GK
5201 if (free_sequence_stack != NULL)
5202 {
5203 tem = free_sequence_stack;
5204 free_sequence_stack = tem->next;
5205 }
5206 else
1b4572a8 5207 tem = GGC_NEW (struct sequence_stack);
23b2ce53 5208
49ad7cfa 5209 tem->next = seq_stack;
23b2ce53
RS
5210 tem->first = first_insn;
5211 tem->last = last_insn;
5212
49ad7cfa 5213 seq_stack = tem;
23b2ce53
RS
5214
5215 first_insn = 0;
5216 last_insn = 0;
5217}
5218
5c7a310f
MM
5219/* Set up the insn chain starting with FIRST as the current sequence,
5220 saving the previously current one. See the documentation for
5221 start_sequence for more information about how to use this function. */
23b2ce53
RS
5222
5223void
502b8322 5224push_to_sequence (rtx first)
23b2ce53
RS
5225{
5226 rtx last;
5227
5228 start_sequence ();
5229
5230 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5231
5232 first_insn = first;
5233 last_insn = last;
5234}
5235
bb27eeda
SE
5236/* Like push_to_sequence, but take the last insn as an argument to avoid
5237 looping through the list. */
5238
5239void
5240push_to_sequence2 (rtx first, rtx last)
5241{
5242 start_sequence ();
5243
5244 first_insn = first;
5245 last_insn = last;
5246}
5247
f15ae3a1
TW
5248/* Set up the outer-level insn chain
5249 as the current sequence, saving the previously current one. */
5250
5251void
502b8322 5252push_topmost_sequence (void)
f15ae3a1 5253{
aefdd5ab 5254 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5255
5256 start_sequence ();
5257
49ad7cfa 5258 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5259 top = stack;
5260
5261 first_insn = top->first;
5262 last_insn = top->last;
5263}
5264
5265/* After emitting to the outer-level insn chain, update the outer-level
5266 insn chain, and restore the previous saved state. */
5267
5268void
502b8322 5269pop_topmost_sequence (void)
f15ae3a1 5270{
aefdd5ab 5271 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5272
49ad7cfa 5273 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5274 top = stack;
5275
5276 top->first = first_insn;
5277 top->last = last_insn;
5278
5279 end_sequence ();
5280}
5281
23b2ce53
RS
5282/* After emitting to a sequence, restore previous saved state.
5283
5c7a310f 5284 To get the contents of the sequence just made, you must call
2f937369 5285 `get_insns' *before* calling here.
5c7a310f
MM
5286
5287 If the compiler might have deferred popping arguments while
5288 generating this sequence, and this sequence will not be immediately
5289 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5290 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5291 pops are inserted into this sequence, and not into some random
5292 location in the instruction stream. See INHIBIT_DEFER_POP for more
5293 information about deferred popping of arguments. */
23b2ce53
RS
5294
5295void
502b8322 5296end_sequence (void)
23b2ce53 5297{
49ad7cfa 5298 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
5299
5300 first_insn = tem->first;
5301 last_insn = tem->last;
49ad7cfa 5302 seq_stack = tem->next;
23b2ce53 5303
e2500fed
GK
5304 memset (tem, 0, sizeof (*tem));
5305 tem->next = free_sequence_stack;
5306 free_sequence_stack = tem;
23b2ce53
RS
5307}
5308
5309/* Return 1 if currently emitting into a sequence. */
5310
5311int
502b8322 5312in_sequence_p (void)
23b2ce53 5313{
49ad7cfa 5314 return seq_stack != 0;
23b2ce53 5315}
23b2ce53 5316\f
59ec66dc
MM
/* Put the various virtual registers into REGNO_REG_RTX, so that
   references by register number find the shared virtual-register
   rtx objects.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}
5328
da43a810
BS
5329\f
5330/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5331static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5332static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5333static int copy_insn_n_scratches;
5334
5335/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5336 copied an ASM_OPERANDS.
5337 In that case, it is the original input-operand vector. */
5338static rtvec orig_asm_operands_vector;
5339
5340/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5341 copied an ASM_OPERANDS.
5342 In that case, it is the copied input-operand vector. */
5343static rtvec copy_asm_operands_vector;
5344
5345/* Likewise for the constraints vector. */
5346static rtvec orig_asm_constraints_vector;
5347static rtvec copy_asm_constraints_vector;
5348
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    /* These codes are always shared, never copied.  */
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      /* Clobbers of hard registers are shared (see gen_hard_reg_clobber).  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      /* Each SCRATCH must be copied exactly once per insn; reuse a
	 previously made copy if this SCRATCH was seen already.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  /* Recurse over the operands according to the rtx format string.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	/* The input-operand and constraint vectors of an ASM_OPERANDS
	   must be shared among all copies of that ASM_OPERANDS, so map
	   the originals to the copies recorded below.  */
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* Record SCRATCH and ASM_OPERANDS copies so later occurrences of the
     same originals are mapped to the same copies (see cases above).  */
  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5480
5481/* Create a new copy of an rtx.
5482 This function differs from copy_rtx in that it handles SCRATCHes and
5483 ASM_OPERANDs properly.
5484 INSN doesn't really have to be a full INSN; it could be just the
5485 pattern. */
5486rtx
502b8322 5487copy_insn (rtx insn)
da43a810
BS
5488{
5489 copy_insn_n_scratches = 0;
5490 orig_asm_operands_vector = 0;
5491 orig_asm_constraints_vector = 0;
5492 copy_asm_operands_vector = 0;
5493 copy_asm_constraints_vector = 0;
5494 return copy_insn_1 (insn);
5495}
59ec66dc 5496
23b2ce53
RS
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  /* Start with an empty insn chain for the new function.  */
  first_insn = NULL;
  last_insn = NULL;
  /* Non-debug insn UIDs start above the range reserved for debug
     insns, when such a range is configured.  */
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  /* Record the known alignment of the stack-related registers.  */
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5563
a73b091d 5564/* Generate a vector constant for mode MODE and constant value CONSTANT. */
69ef87e2
AH
5565
5566static rtx
a73b091d 5567gen_const_vector (enum machine_mode mode, int constant)
69ef87e2
AH
5568{
5569 rtx tem;
5570 rtvec v;
5571 int units, i;
5572 enum machine_mode inner;
5573
5574 units = GET_MODE_NUNITS (mode);
5575 inner = GET_MODE_INNER (mode);
5576
15ed7b52
JG
5577 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5578
69ef87e2
AH
5579 v = rtvec_alloc (units);
5580
a73b091d
JW
5581 /* We need to call this function after we set the scalar const_tiny_rtx
5582 entries. */
5583 gcc_assert (const_tiny_rtx[constant][(int) inner]);
69ef87e2
AH
5584
5585 for (i = 0; i < units; ++i)
a73b091d 5586 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
69ef87e2 5587
a06e3c40 5588 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5589 return tem;
5590}
5591
a06e3c40 5592/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
a73b091d 5593 all elements are zero, and the one vector when all elements are one. */
a06e3c40 5594rtx
502b8322 5595gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40 5596{
a73b091d
JW
5597 enum machine_mode inner = GET_MODE_INNER (mode);
5598 int nunits = GET_MODE_NUNITS (mode);
5599 rtx x;
a06e3c40
R
5600 int i;
5601
a73b091d
JW
5602 /* Check to see if all of the elements have the same value. */
5603 x = RTVEC_ELT (v, nunits - 1);
5604 for (i = nunits - 2; i >= 0; i--)
5605 if (RTVEC_ELT (v, i) != x)
5606 break;
5607
5608 /* If the values are all the same, check to see if we can use one of the
5609 standard constant vectors. */
5610 if (i == -1)
5611 {
5612 if (x == CONST0_RTX (inner))
5613 return CONST0_RTX (mode);
5614 else if (x == CONST1_RTX (inner))
5615 return CONST1_RTX (mode);
5616 }
5617
5618 return gen_rtx_raw_CONST_VECTOR (mode, v);
a06e3c40
R
5619}
5620
b5deb7b6
SL
/* Initialise global register information required by all functions.
   Builds the shared rtx objects for the special and hard registers.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* The static chain may live in a register or, when the STATIC_CHAIN
     macro is defined, in an arbitrary target-specified rtx; the
     "incoming" variant may differ from the outgoing one.  */
#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
5688
23b2ce53
RS
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.

   Sets up the constant hash tables, discovers the standard machine
   modes (byte_mode, word_mode, ptr_mode), and fills the const_tiny_rtx
   table with the shared 0/1/2 constants for every mode class.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  /* Scan the integer modes for the narrowest ones matching the target's
     byte and word widths.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  /* Likewise find the float mode matching DOUBLE_TYPE_SIZE.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* The shared small real constants 0, 1, 2, -1 and 0.5.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  /* Fill const_tiny_rtx[i] (i = 0, 1, 2) for the scalar mode classes.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Complex zeros are CONCATs of the inner mode's zero with itself.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* Vector zeros and ones are built from the scalar entries made above.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Fixed-point modes: build the FCONST0 (and, for accumulator modes,
     FCONST1) fixed values and their CONST_FIXED rtx wrappers.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1(mode).data.low,
		     &FCONST1(mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				       FCONST1 (mode), mode);
    }

  /* Vector fixed-point modes; fractional vectors only need zero.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Condition-code modes share const0_rtx as their zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
a11759a3 5947\f
969d70ca
JH
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.
   Returns the newly emitted insn.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  /* Emit a copy of the pattern with the emitter matching INSN's kind.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      /* Preserve the call-specific flags on the copy.  */
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	/* EXPR_LIST note payloads are rtl that may contain SCRATCHes,
	   so deep-copy them; other payloads are shared as-is.  */
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
e2500fed 6012
1431042e 6013static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d
JH
6014rtx
6015gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6016{
6017 if (hard_reg_clobbers[mode][regno])
6018 return hard_reg_clobbers[mode][regno];
6019 else
6020 return (hard_reg_clobbers[mode][regno] =
6021 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6022}
6023
e2500fed 6024#include "gt-emit-rtl.h"