]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
c_locale.cc (__convert_to_v(long&), [...]): Remove.
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
5e6908ea 1/* Emit RTL for the GCC expander.
ef58a523 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
9d18e06b 3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
23b2ce53 4
1322177d 5This file is part of GCC.
23b2ce53 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
23b2ce53 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
23b2ce53
RS
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
23b2ce53
RS
21
22
23/* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38#include "config.h"
670ee920 39#include "system.h"
4977bab6
ZW
40#include "coretypes.h"
41#include "tm.h"
01198c2f 42#include "toplev.h"
23b2ce53 43#include "rtl.h"
a25c7971 44#include "tree.h"
6baf1cc8 45#include "tm_p.h"
23b2ce53
RS
46#include "flags.h"
47#include "function.h"
48#include "expr.h"
49#include "regs.h"
aff48bca 50#include "hard-reg-set.h"
c13e8210 51#include "hashtab.h"
23b2ce53 52#include "insn-config.h"
e9a25f70 53#include "recog.h"
23b2ce53 54#include "real.h"
0dfa1860 55#include "bitmap.h"
a05924f9 56#include "basic-block.h"
87ff9c8e 57#include "ggc.h"
e1772ac0 58#include "debug.h"
d23c55c2 59#include "langhooks.h"
ca695ac9 60
1d445e9e
ILT
61/* Commonly used modes. */
62
0f41302f
MS
63enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 65enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 66enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 67
23b2ce53
RS
68
69/* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
044b4de3 72static GTY(()) int label_num = 1;
23b2ce53 73
23b2ce53
RS
74/* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78static int last_label_num;
79
80/* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83static int base_label_num;
84
85/* Nonzero means do not generate NOTEs for source line numbers. */
86
87static int no_line_numbers;
88
89/* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
5692c7bc
ZW
91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
23b2ce53 93
5da077de 94rtx global_rtl[GR_MAX];
23b2ce53 95
6cde4876
JL
96/* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
23b2ce53
RS
102/* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
68d75312
JC
108rtx const_true_rtx;
109
23b2ce53
RS
110REAL_VALUE_TYPE dconst0;
111REAL_VALUE_TYPE dconst1;
112REAL_VALUE_TYPE dconst2;
f7657db9
KG
113REAL_VALUE_TYPE dconst3;
114REAL_VALUE_TYPE dconst10;
23b2ce53 115REAL_VALUE_TYPE dconstm1;
03f2ea93
RS
116REAL_VALUE_TYPE dconstm2;
117REAL_VALUE_TYPE dconsthalf;
f7657db9 118REAL_VALUE_TYPE dconstthird;
ab01a87c
KG
119REAL_VALUE_TYPE dconstpi;
120REAL_VALUE_TYPE dconste;
23b2ce53
RS
121
122/* All references to the following fixed hard registers go through
123 these unique rtl objects. On machines where the frame-pointer and
124 arg-pointer are the same register, they use the same unique object.
125
126 After register allocation, other rtl objects which used to be pseudo-regs
127 may be clobbered to refer to the frame-pointer register.
128 But references that were originally to the frame-pointer can be
129 distinguished from the others because they contain frame_pointer_rtx.
130
ac6f08b0
DE
131 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
132 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 133 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
134 register elimination hard_frame_pointer_rtx should always be used.
135 On machines where the two registers are same (most) then these are the
136 same.
137
23b2ce53
RS
138 In an inline procedure, the stack and frame pointer rtxs may not be
139 used for anything else. */
23b2ce53
RS
140rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
141rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
142rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
143
a4417a86
JW
144/* This is used to implement __builtin_return_address for some machines.
145 See for instance the MIPS port. */
146rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
147
23b2ce53
RS
148/* We make one copy of (const_int C) where C is in
149 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
150 to save space during the compilation and simplify comparisons of
151 integers. */
152
5da077de 153rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 154
c13e8210
MM
155/* A hash table storing CONST_INTs whose absolute value is greater
156 than MAX_SAVED_CONST_INT. */
157
e2500fed
GK
158static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
159 htab_t const_int_htab;
c13e8210 160
173b24b9 161/* A hash table storing memory attribute structures. */
e2500fed
GK
162static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
163 htab_t mem_attrs_htab;
173b24b9 164
a560d4d4
JH
165/* A hash table storing register attribute structures. */
166static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
167 htab_t reg_attrs_htab;
168
5692c7bc 169/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
170static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
171 htab_t const_double_htab;
5692c7bc 172
01d939e8
BS
173#define first_insn (cfun->emit->x_first_insn)
174#define last_insn (cfun->emit->x_last_insn)
175#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
fd3acbb3 176#define last_location (cfun->emit->x_last_location)
01d939e8 177#define first_label_num (cfun->emit->x_first_label_num)
23b2ce53 178
502b8322
AJ
179static rtx make_jump_insn_raw (rtx);
180static rtx make_call_insn_raw (rtx);
181static rtx find_line_note (rtx);
182static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
502b8322
AJ
183static void unshare_all_decls (tree);
184static void reset_used_decls (tree);
185static void mark_label_nuses (rtx);
186static hashval_t const_int_htab_hash (const void *);
187static int const_int_htab_eq (const void *, const void *);
188static hashval_t const_double_htab_hash (const void *);
189static int const_double_htab_eq (const void *, const void *);
190static rtx lookup_const_double (rtx);
191static hashval_t mem_attrs_htab_hash (const void *);
192static int mem_attrs_htab_eq (const void *, const void *);
193static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
194 enum machine_mode);
195static hashval_t reg_attrs_htab_hash (const void *);
196static int reg_attrs_htab_eq (const void *, const void *);
197static reg_attrs *get_reg_attrs (tree, int);
198static tree component_ref_for_mem_expr (tree);
199static rtx gen_const_vector_0 (enum machine_mode);
200static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
c13e8210 201
6b24c259
JH
202/* Probability of the conditional branch currently proceeded by try_split.
203 Set to -1 otherwise. */
204int split_branch_probability = -1;
ca695ac9 205\f
c13e8210
MM
206/* Returns a hash code for X (which is a really a CONST_INT). */
207
208static hashval_t
502b8322 209const_int_htab_hash (const void *x)
c13e8210 210{
bcda12f4 211 return (hashval_t) INTVAL ((rtx) x);
c13e8210
MM
212}
213
cc2902df 214/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
215 CONST_INT) is the same as that given by Y (which is really a
216 HOST_WIDE_INT *). */
217
218static int
502b8322 219const_int_htab_eq (const void *x, const void *y)
c13e8210 220{
5692c7bc
ZW
221 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
222}
223
224/* Returns a hash code for X (which is really a CONST_DOUBLE). */
225static hashval_t
502b8322 226const_double_htab_hash (const void *x)
5692c7bc 227{
5692c7bc 228 rtx value = (rtx) x;
46b33600 229 hashval_t h;
5692c7bc 230
46b33600
RH
231 if (GET_MODE (value) == VOIDmode)
232 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
233 else
fe352c29 234 {
15c812e3 235 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
236 /* MODE is used in the comparison, so it should be in the hash. */
237 h ^= GET_MODE (value);
238 }
5692c7bc
ZW
239 return h;
240}
241
cc2902df 242/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
243 is the same as that represented by Y (really a ...) */
244static int
502b8322 245const_double_htab_eq (const void *x, const void *y)
5692c7bc
ZW
246{
247 rtx a = (rtx)x, b = (rtx)y;
5692c7bc
ZW
248
249 if (GET_MODE (a) != GET_MODE (b))
250 return 0;
8580f7a0
RH
251 if (GET_MODE (a) == VOIDmode)
252 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
253 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
254 else
255 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
256 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
257}
258
173b24b9
RK
259/* Returns a hash code for X (which is a really a mem_attrs *). */
260
261static hashval_t
502b8322 262mem_attrs_htab_hash (const void *x)
173b24b9
RK
263{
264 mem_attrs *p = (mem_attrs *) x;
265
266 return (p->alias ^ (p->align * 1000)
267 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
268 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
998d7deb 269 ^ (size_t) p->expr);
173b24b9
RK
270}
271
cc2902df 272/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
273 mem_attrs *) is the same as that given by Y (which is also really a
274 mem_attrs *). */
c13e8210
MM
275
276static int
502b8322 277mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 278{
173b24b9
RK
279 mem_attrs *p = (mem_attrs *) x;
280 mem_attrs *q = (mem_attrs *) y;
281
998d7deb 282 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
173b24b9 283 && p->size == q->size && p->align == q->align);
c13e8210
MM
284}
285
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  Returns the shared, GC-allocated descriptor, or zero
   when every field has its default value (the MEM then carries no
   attribute block at all).  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  /* Build a key on the stack and consult the hash table.  */
  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      /* Not present: copy the key into GC-managed storage.  */
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
322
a560d4d4
JH
323/* Returns a hash code for X (which is a really a reg_attrs *). */
324
325static hashval_t
502b8322 326reg_attrs_htab_hash (const void *x)
a560d4d4
JH
327{
328 reg_attrs *p = (reg_attrs *) x;
329
330 return ((p->offset * 1000) ^ (long) p->decl);
331}
332
6356f892 333/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
334 reg_attrs *) is the same as that given by Y (which is also really a
335 reg_attrs *). */
336
337static int
502b8322 338reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4
JH
339{
340 reg_attrs *p = (reg_attrs *) x;
341 reg_attrs *q = (reg_attrs *) y;
342
343 return (p->decl == q->decl && p->offset == q->offset);
344}
345/* Allocate a new reg_attrs structure and insert it into the hash table if
346 one identical to it is not already in the table. We are doing this for
347 MEM of mode MODE. */
348
349static reg_attrs *
502b8322 350get_reg_attrs (tree decl, int offset)
a560d4d4
JH
351{
352 reg_attrs attrs;
353 void **slot;
354
355 /* If everything is the default, we can just return zero. */
356 if (decl == 0 && offset == 0)
357 return 0;
358
359 attrs.decl = decl;
360 attrs.offset = offset;
361
362 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
363 if (*slot == 0)
364 {
365 *slot = ggc_alloc (sizeof (reg_attrs));
366 memcpy (*slot, &attrs, sizeof (reg_attrs));
367 }
368
369 return *slot;
370}
371
08394eef
BS
372/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
373 don't attempt to share with the various global pieces of rtl (such as
374 frame_pointer_rtx). */
375
376rtx
502b8322 377gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
378{
379 rtx x = gen_rtx_raw_REG (mode, regno);
380 ORIGINAL_REGNO (x) = regno;
381 return x;
382}
383
c5c76735
JL
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

/* Return the unique CONST_INT rtx for value ARG.  MODE is ignored;
   CONST_INTs are always created with VOIDmode.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  /* Small values are pre-allocated once per compilation; share those.  */
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
409
2496c7bd 410rtx
502b8322 411gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
412{
413 return GEN_INT (trunc_int_for_mode (c, mode));
414}
415
5692c7bc
ZW
416/* CONST_DOUBLEs might be created from pairs of integers, or from
417 REAL_VALUE_TYPEs. Also, their length is known only at run time,
418 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
419
420/* Determine whether REAL, a CONST_DOUBLE, already exists in the
421 hash table. If so, return its counterpart; otherwise add it
422 to the hash table and return it. */
423static rtx
502b8322 424lookup_const_double (rtx real)
5692c7bc
ZW
425{
426 void **slot = htab_find_slot (const_double_htab, real, INSERT);
427 if (*slot == 0)
428 *slot = real;
429
430 return (rtx) *slot;
431}
29105cea 432
5692c7bc
ZW
433/* Return a CONST_DOUBLE rtx for a floating-point value specified by
434 VALUE in mode MODE. */
0133b7d9 435rtx
502b8322 436const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 437{
5692c7bc
ZW
438 rtx real = rtx_alloc (CONST_DOUBLE);
439 PUT_MODE (real, mode);
440
441 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
442
443 return lookup_const_double (real);
444}
445
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      /* Only integer-class (and vector) modes are legal here.  */
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  /* Zero any words of the CONST_DOUBLE beyond the two we just set.  */
  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  /* Share with any identical CONST_DOUBLE already created.  */
  return lookup_const_double (value);
}
533
/* Return a REG rtx for register number REGNO in MODE, sharing the
   pre-allocated global rtx for the well-known pointer registers when
   it is safe to do so.  */

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  /* Fall back to a fresh, unshared REG.  */
  return gen_raw_REG (mode, regno);
}
598
41472af8 599rtx
502b8322 600gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
601{
602 rtx rt = gen_rtx_raw_MEM (mode, addr);
603
604 /* This field is not cleared by the mere allocation of the rtx, so
605 we clear it here. */
173b24b9 606 MEM_ATTRS (rt) = 0;
41472af8
MM
607
608 return rt;
609}
ddef6bc7
JJ
610
/* Return a SUBREG of REG in MODE starting at byte OFFSET.  OFFSET must
   be a multiple of the size of MODE.  */

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
629
173b24b9
RK
630/* Generate a SUBREG representing the least-significant part of REG if MODE
631 is smaller than mode of REG, otherwise paradoxical SUBREG. */
632
ddef6bc7 633rtx
502b8322 634gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
635{
636 enum machine_mode inmode;
ddef6bc7
JJ
637
638 inmode = GET_MODE (reg);
639 if (inmode == VOIDmode)
640 inmode = mode;
e0e08ac2
JH
641 return gen_rtx_SUBREG (mode, reg,
642 subreg_lowpart_offset (mode, inmode));
ddef6bc7 643}
c5c76735 644\f
23b2ce53
RS
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.   The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.   The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.   For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
**
**	...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/
  va_list p;

  va_start (p, mode);

  /* The special codes get dedicated constructors; everything else is
     filled in field-by-field from the rtx format string.  */
  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Field with unknown use.  Zero it.  */
	      X0EXP (rt_val, i) = NULL_RTX;
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer? */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap? */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree? */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  va_end (p);
  return rt_val;
}
757
758/* gen_rtvec (n, [rt1, ..., rtn])
759**
760** This routine creates an rtvec and stores within it the
761** pointers to rtx's which are its arguments.
762*/
763
764/*VARARGS1*/
765rtvec
e34d07f2 766gen_rtvec (int n, ...)
23b2ce53 767{
6268b922 768 int i, save_n;
23b2ce53 769 rtx *vector;
e34d07f2 770 va_list p;
23b2ce53 771
e34d07f2 772 va_start (p, n);
23b2ce53
RS
773
774 if (n == 0)
775 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
776
703ad42b 777 vector = alloca (n * sizeof (rtx));
4f90e4a0 778
23b2ce53
RS
779 for (i = 0; i < n; i++)
780 vector[i] = va_arg (p, rtx);
6268b922
KG
781
782 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
783 save_n = n;
e34d07f2 784 va_end (p);
23b2ce53 785
6268b922 786 return gen_rtvec_v (save_n, vector);
23b2ce53
RS
787}
788
789rtvec
502b8322 790gen_rtvec_v (int n, rtx *argp)
23b2ce53 791{
b3694847
SS
792 int i;
793 rtvec rt_val;
23b2ce53
RS
794
795 if (n == 0)
796 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
797
798 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
799
800 for (i = 0; i < n; i++)
8f985ec4 801 rt_val->elem[i] = *argp++;
23b2ce53
RS
802
803 return rt_val;
804}
805\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      /* Double the size of both per-pseudo tables, zeroing the new
	 halves.  The two tables grow in lockstep.  */
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
			  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  /* Allocate the pseudo and record it under its register number.  */
  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
862
dcc24678 863/* Generate a register with same attributes as REG,
a560d4d4
JH
864 but offsetted by OFFSET. */
865
866rtx
502b8322 867gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
a560d4d4
JH
868{
869 rtx new = gen_rtx_REG (mode, regno);
870 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
502b8322 871 REG_OFFSET (reg) + offset);
a560d4d4
JH
872 return new;
873}
874
/* Copy the expression and constant offset recorded in MEM's attributes,
   if any, onto the register REG.  (NOTE(review): the old comment "Set
   the decl for MEM to DECL" did not match this function's behavior.)  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  /* Only transfer attributes when the MEM has a known constant offset.  */
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}
884
9d18e06b
JZ
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.

   PARM_RTX is either a single REG or a PARALLEL describing how an
   incoming parameter is split across registers (and possibly stack).  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  /* Each element is an EXPR_LIST (reg, byte-offset).  */
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (GET_CODE (XEXP (x, 0)) == REG)
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
908
a560d4d4
JH
/* Assign the RTX X to declaration T, and record the reverse mapping
   (declaration and byte offset) in the REG_ATTRS of any registers
   contained in X — plain REG, SUBREG of a REG, CONCAT of two parts,
   or a PARALLEL of (reg, offset) pairs.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For register, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    /* The inner reg starts SUBREG_BYTE bytes before the decl's value.  */
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	/* The second part lives one unit past the first.  */
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  /* Each element is an EXPR_LIST (reg, byte-offset).  */
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
942
754fdcca
RK
943/* Identify REG (which may be a CONCAT) as a user register. */
944
945void
502b8322 946mark_user_reg (rtx reg)
754fdcca
RK
947{
948 if (GET_CODE (reg) == CONCAT)
949 {
950 REG_USERVAR_P (XEXP (reg, 0)) = 1;
951 REG_USERVAR_P (XEXP (reg, 1)) = 1;
952 }
953 else if (GET_CODE (reg) == REG)
954 REG_USERVAR_P (reg) = 1;
955 else
956 abort ();
957}
958
86fe05e0
RK
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  ALIGN is in bits.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is;
       lower the recorded alignment to the new, weaker guarantee.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
976
/* Return 1 plus largest pseudo reg number used in the current function.
   reg_rtx_no is the next pseudo number to be handed out, so it is
   exactly one past the largest in use.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}
984
985/* Return 1 + the largest label number used so far in the current function. */
986
987int
502b8322 988max_label_num (void)
23b2ce53
RS
989{
990 if (last_label_num && label_num == base_label_num)
991 return last_label_num;
992 return label_num;
993}
994
/* Return first label number used in this function (if any were used).
   Simply reports the saved counter value from function entry.  */

int
get_first_label_num (void)
{
  return first_label_num;
}
1002\f
ddef6bc7
JJ
/* Return the final regno of X, which is a SUBREG of a hard
   register.  If CHECK_MODE is nonzero, also verify that the inner
   register is valid in its own mode.  Aborts on any malformed
   subreg rather than returning an error code.  */
int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
				      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
1036
23b2ce53
RS
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	/* Re-extend the inner operand, but only to the narrower MODE.  */
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
      /* NOTE: if none of the above hold, we deliberately fall through to
	 the trailing "can't do this" return.  */
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    /* A VOIDmode constant viewed as a vector: pick an integer mode of
       the right width as the nominal inner mode.  */
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  /* Reuse X itself when the truncation changed nothing.  */
	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      /* Reinterpret a 32-bit integer constant's bits as a float.  */
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      /* Reinterpret a 64-bit integer constant's bits as a double.  */
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  /* Sign-extend into the high word.  */
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	/* On a 64-bit host both halves live in LOW; extract the top 32
	   bits (the double shift avoids shifting by >= type width).  */
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      /* The reverse direction: view a floating constant's bits as an
	 integer.  */
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }
  /* If MODE is a condition code and X is a CONST_INT, the value of X
     must already have been "recognized" by the back-end, and we can
     assume that it is valid for this mode.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && GET_CODE (x) == CONST_INT)
    return x;

  /* Otherwise, we can't do this.  */
  return 0;
}
1237\f
b1d673be
RS
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or complex component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.

   Only handles X that is a MEM whose address is a SYMBOL_REF backed
   by a COMPLEX_CST declaration; everything else yields NULL_RTX.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
	{
	  part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  /* Only literal numeric components can be expanded safely here.  */
	  if (TREE_CODE (part) == REAL_CST
	      || TREE_CODE (part) == INTEGER_CST)
	    return expand_expr (part, NULL_RTX, mode, 0);
	}
    }
  return NULL_RTX;
}
1262
280194b0
RS
1263/* Return the real part (which has mode MODE) of a complex value X.
1264 This always comes at the low address in memory. */
1265
1266rtx
502b8322 1267gen_realpart (enum machine_mode mode, rtx x)
280194b0 1268{
b1d673be
RS
1269 rtx part;
1270
1271 /* Handle complex constants. */
1272 part = gen_complex_constant_part (mode, x, 0);
1273 if (part != NULL_RTX)
1274 return part;
1275
e0e08ac2
JH
1276 if (WORDS_BIG_ENDIAN
1277 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1278 && REG_P (x)
1279 && REGNO (x) < FIRST_PSEUDO_REGISTER)
400500c4 1280 internal_error
c725bd79 1281 ("can't access real part of complex value in hard register");
dc139c90 1282 else if (WORDS_BIG_ENDIAN)
280194b0
RS
1283 return gen_highpart (mode, x);
1284 else
1285 return gen_lowpart (mode, x);
1286}
1287
1288/* Return the imaginary part (which has mode MODE) of a complex value X.
1289 This always comes at the high address in memory. */
1290
1291rtx
502b8322 1292gen_imagpart (enum machine_mode mode, rtx x)
280194b0 1293{
b1d673be
RS
1294 rtx part;
1295
1296 /* Handle complex constants. */
1297 part = gen_complex_constant_part (mode, x, 1);
1298 if (part != NULL_RTX)
1299 return part;
1300
e0e08ac2 1301 if (WORDS_BIG_ENDIAN)
280194b0 1302 return gen_lowpart (mode, x);
ddef6bc7 1303 else if (! WORDS_BIG_ENDIAN
40c0c3cf
JL
1304 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1305 && REG_P (x)
1306 && REGNO (x) < FIRST_PSEUDO_REGISTER)
400500c4
RK
1307 internal_error
1308 ("can't access imaginary part of complex value in hard register");
280194b0
RS
1309 else
1310 return gen_highpart (mode, x);
1311}
81284a6a
JW
1312
1313/* Return 1 iff X, assumed to be a SUBREG,
1314 refers to the real part of the complex value in its containing reg.
1315 Complex values are always stored with the real part in the first word,
1316 regardless of WORDS_BIG_ENDIAN. */
1317
1318int
502b8322 1319subreg_realpart_p (rtx x)
81284a6a
JW
1320{
1321 if (GET_CODE (x) != SUBREG)
1322 abort ();
1323
ddef6bc7 1324 return ((unsigned int) SUBREG_BYTE (x)
770ae6cc 1325 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
81284a6a 1326}
280194b0 1327\f
23b2ce53
RS
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.

   Unlike gen_lowpart_common, this never returns 0: it copies to a
   register or aborts for the cases the common routine can't handle.  */

rtx
gen_lowpart (enum machine_mode mode, rtx x)
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				    GET_MODE_BITSIZE (GET_MODE (x)))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      /* Otherwise compute the byte offset of the low part within the
	 MEM, honoring word and byte endianness separately.  */
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
1380
750c9258 1381/* Like `gen_lowpart', but refer to the most significant part.
ccba022b
RS
1382 This is used to access the imaginary part of a complex number. */
1383
1384rtx
502b8322 1385gen_highpart (enum machine_mode mode, rtx x)
ccba022b 1386{
ddef6bc7 1387 unsigned int msize = GET_MODE_SIZE (mode);
e0e08ac2 1388 rtx result;
ddef6bc7 1389
ccba022b
RS
1390 /* This case loses if X is a subreg. To catch bugs early,
1391 complain if an invalid MODE is used even in other cases. */
ddef6bc7
JJ
1392 if (msize > UNITS_PER_WORD
1393 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
ccba022b 1394 abort ();
ddef6bc7 1395
e0e08ac2
JH
1396 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1397 subreg_highpart_offset (mode, GET_MODE (x)));
09482e0d
JW
1398
1399 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1400 the target if we have a MEM. gen_highpart must return a valid operand,
1401 emitting code if necessary to do so. */
13b8c631 1402 if (result != NULL_RTX && GET_CODE (result) == MEM)
09482e0d
JW
1403 result = validize_mem (result);
1404
e0e08ac2
JH
1405 if (!result)
1406 abort ();
1407 return result;
1408}
5222e470 1409
26d249eb 1410/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1411 be VOIDmode constant. */
1412rtx
502b8322 1413gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1414{
1415 if (GET_MODE (exp) != VOIDmode)
1416 {
1417 if (GET_MODE (exp) != innermode)
1418 abort ();
1419 return gen_highpart (outermode, exp);
1420 }
1421 return simplify_gen_subreg (outermode, exp, innermode,
1422 subreg_highpart_offset (outermode, innermode));
1423}
68252e27 1424
e0e08ac2
JH
1425/* Return offset in bytes to get OUTERMODE low part
1426 of the value in mode INNERMODE stored in memory in target format. */
8698cce3 1427
e0e08ac2 1428unsigned int
502b8322 1429subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1430{
1431 unsigned int offset = 0;
1432 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1433
e0e08ac2 1434 if (difference > 0)
ccba022b 1435 {
e0e08ac2
JH
1436 if (WORDS_BIG_ENDIAN)
1437 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1438 if (BYTES_BIG_ENDIAN)
1439 offset += difference % UNITS_PER_WORD;
ccba022b 1440 }
ddef6bc7 1441
e0e08ac2 1442 return offset;
ccba022b 1443}
eea50aa0 1444
e0e08ac2
JH
1445/* Return offset in bytes to get OUTERMODE high part
1446 of the value in mode INNERMODE stored in memory in target format. */
1447unsigned int
502b8322 1448subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1449{
1450 unsigned int offset = 0;
1451 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1452
e0e08ac2 1453 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
68252e27 1454 abort ();
e0e08ac2 1455
eea50aa0
JH
1456 if (difference > 0)
1457 {
e0e08ac2 1458 if (! WORDS_BIG_ENDIAN)
eea50aa0 1459 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1460 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1461 offset += difference % UNITS_PER_WORD;
1462 }
1463
e0e08ac2 1464 return offset;
eea50aa0 1465}
ccba022b 1466
23b2ce53
RS
1467/* Return 1 iff X, assumed to be a SUBREG,
1468 refers to the least significant part of its containing reg.
1469 If X is not a SUBREG, always return 1 (it is its own low part!). */
1470
1471int
502b8322 1472subreg_lowpart_p (rtx x)
23b2ce53
RS
1473{
1474 if (GET_CODE (x) != SUBREG)
1475 return 1;
a3a03040
RK
1476 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1477 return 0;
23b2ce53 1478
e0e08ac2
JH
1479 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1480 == SUBREG_BYTE (x));
23b2ce53
RS
1481}
1482\f
23b2ce53 1483
ddef6bc7
JJ
1484/* Helper routine for all the constant cases of operand_subword.
1485 Some places invoke this directly. */
23b2ce53
RS
1486
1487rtx
502b8322 1488constant_subword (rtx op, int offset, enum machine_mode mode)
23b2ce53 1489{
906c4e36 1490 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
ddef6bc7 1491 HOST_WIDE_INT val;
23b2ce53
RS
1492
1493 /* If OP is already an integer word, return it. */
1494 if (GET_MODE_CLASS (mode) == MODE_INT
1495 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1496 return op;
1497
5495cc55
RH
1498 /* The output is some bits, the width of the target machine's word.
1499 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1500 host can't. */
9847c2f6 1501 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1632afca 1502 && GET_MODE_CLASS (mode) == MODE_FLOAT
7677ffa4 1503 && GET_MODE_BITSIZE (mode) == 64
1632afca
RS
1504 && GET_CODE (op) == CONST_DOUBLE)
1505 {
9847c2f6 1506 long k[2];
1632afca
RS
1507 REAL_VALUE_TYPE rv;
1508
1509 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1510 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
7677ffa4 1511
9847c2f6 1512 /* We handle 32-bit and >= 64-bit words here. Note that the order in
7677ffa4 1513 which the words are written depends on the word endianness.
7677ffa4 1514 ??? This is a potential portability problem and should
7cae975e
RH
1515 be fixed at some point.
1516
a1f300c0 1517 We must exercise caution with the sign bit. By definition there
7cae975e
RH
1518 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1519 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1520 So we explicitly mask and sign-extend as necessary. */
9847c2f6 1521 if (BITS_PER_WORD == 32)
7cae975e 1522 {
ddef6bc7 1523 val = k[offset];
7cae975e
RH
1524 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1525 return GEN_INT (val);
1526 }
1527#if HOST_BITS_PER_WIDE_INT >= 64
ddef6bc7 1528 else if (BITS_PER_WORD >= 64 && offset == 0)
7cae975e
RH
1529 {
1530 val = k[! WORDS_BIG_ENDIAN];
1531 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1532 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1533 return GEN_INT (val);
1534 }
9847c2f6 1535#endif
47b34d40
JW
1536 else if (BITS_PER_WORD == 16)
1537 {
ddef6bc7
JJ
1538 val = k[offset >> 1];
1539 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
7cae975e 1540 val >>= 16;
73de376f 1541 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
7cae975e 1542 return GEN_INT (val);
47b34d40 1543 }
7677ffa4
RK
1544 else
1545 abort ();
1632afca 1546 }
a5559dbc
RE
1547 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1548 && GET_MODE_CLASS (mode) == MODE_FLOAT
1549 && GET_MODE_BITSIZE (mode) > 64
1550 && GET_CODE (op) == CONST_DOUBLE)
5495cc55
RH
1551 {
1552 long k[4];
1553 REAL_VALUE_TYPE rv;
a5559dbc 1554
5495cc55
RH
1555 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1556 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
a5559dbc 1557
5495cc55
RH
1558 if (BITS_PER_WORD == 32)
1559 {
ddef6bc7 1560 val = k[offset];
5495cc55
RH
1561 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1562 return GEN_INT (val);
1563 }
1564#if HOST_BITS_PER_WIDE_INT >= 64
ddef6bc7 1565 else if (BITS_PER_WORD >= 64 && offset <= 1)
5495cc55 1566 {
ddef6bc7 1567 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
5495cc55 1568 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
ddef6bc7 1569 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
5495cc55
RH
1570 return GEN_INT (val);
1571 }
1572#endif
1573 else
1574 abort ();
1575 }
23b2ce53
RS
1576
1577 /* Single word float is a little harder, since single- and double-word
1578 values often do not have the same high-order bits. We have already
1579 verified that we want the only defined word of the single-word value. */
9847c2f6 1580 if (GET_MODE_CLASS (mode) == MODE_FLOAT
7677ffa4 1581 && GET_MODE_BITSIZE (mode) == 32
1632afca
RS
1582 && GET_CODE (op) == CONST_DOUBLE)
1583 {
9847c2f6 1584 long l;
1632afca
RS
1585 REAL_VALUE_TYPE rv;
1586
1587 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1588 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
aa2ae679 1589
7cae975e
RH
1590 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1591 val = l;
1592 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
b5a3eb84 1593
aa2ae679
JL
1594 if (BITS_PER_WORD == 16)
1595 {
ddef6bc7 1596 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
7cae975e 1597 val >>= 16;
73de376f 1598 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
aa2ae679 1599 }
7cae975e
RH
1600
1601 return GEN_INT (val);
1632afca 1602 }
750c9258 1603
23b2ce53
RS
1604 /* The only remaining cases that we can handle are integers.
1605 Convert to proper endianness now since these cases need it.
750c9258 1606 At this point, offset == 0 means the low-order word.
23b2ce53 1607
2d4f57f8
RK
1608 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1609 in general. However, if OP is (const_int 0), we can just return
1610 it for any word. */
1611
1612 if (op == const0_rtx)
1613 return op;
23b2ce53
RS
1614
1615 if (GET_MODE_CLASS (mode) != MODE_INT
2d4f57f8 1616 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
0cf214a0 1617 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
23b2ce53
RS
1618 return 0;
1619
1620 if (WORDS_BIG_ENDIAN)
ddef6bc7 1621 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
23b2ce53
RS
1622
1623 /* Find out which word on the host machine this value is in and get
1624 it from the constant. */
ddef6bc7 1625 val = (offset / size_ratio == 0
23b2ce53
RS
1626 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1627 : (GET_CODE (op) == CONST_INT
1628 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1629
3f518020 1630 /* Get the value we want into the low bits of val. */
906c4e36 1631 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
ddef6bc7 1632 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
3f518020 1633
7e4ce834 1634 val = trunc_int_for_mode (val, word_mode);
23b2ce53 1635
906c4e36 1636 return GEN_INT (val);
23b2ce53
RS
1637}
1638
ddef6bc7
JJ
1639/* Return subword OFFSET of operand OP.
1640 The word number, OFFSET, is interpreted as the word number starting
1641 at the low-order address. OFFSET 0 is the low-order word if not
1642 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1643
1644 If we cannot extract the required word, we return zero. Otherwise,
1645 an rtx corresponding to the requested word will be returned.
1646
1647 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1648 reload has completed, a valid address will always be returned. After
1649 reload, if a valid address cannot be returned, we return zero.
1650
1651 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1652 it is the responsibility of the caller.
1653
1654 MODE is the mode of OP in case it is a CONST_INT.
1655
1656 ??? This is still rather broken for some cases. The problem for the
1657 moment is that all callers of this thing provide no 'goal mode' to
1658 tell us to work with. This exists because all callers were written
0631e0bf
JH
1659 in a word based SUBREG world.
1660 Now use of this function can be deprecated by simplify_subreg in most
1661 cases.
1662 */
ddef6bc7
JJ
1663
1664rtx
502b8322 1665operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
ddef6bc7
JJ
1666{
1667 if (mode == VOIDmode)
1668 mode = GET_MODE (op);
1669
1670 if (mode == VOIDmode)
1671 abort ();
1672
30f7a378 1673 /* If OP is narrower than a word, fail. */
ddef6bc7
JJ
1674 if (mode != BLKmode
1675 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1676 return 0;
1677
30f7a378 1678 /* If we want a word outside OP, return zero. */
ddef6bc7
JJ
1679 if (mode != BLKmode
1680 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1681 return const0_rtx;
1682
ddef6bc7
JJ
1683 /* Form a new MEM at the requested address. */
1684 if (GET_CODE (op) == MEM)
1685 {
f1ec5147 1686 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
ddef6bc7 1687
f1ec5147
RK
1688 if (! validate_address)
1689 return new;
1690
1691 else if (reload_completed)
ddef6bc7 1692 {
f1ec5147
RK
1693 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1694 return 0;
ddef6bc7 1695 }
f1ec5147
RK
1696 else
1697 return replace_equiv_address (new, XEXP (new, 0));
ddef6bc7
JJ
1698 }
1699
0631e0bf
JH
1700 /* Rest can be handled by simplify_subreg. */
1701 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
ddef6bc7
JJ
1702}
1703
23b2ce53
RS
1704/* Similar to `operand_subword', but never return 0. If we can't extract
1705 the required subword, put OP into a register and try again. If that fails,
750c9258 1706 abort. We always validate the address in this case.
23b2ce53
RS
1707
1708 MODE is the mode of OP, in case it is CONST_INT. */
1709
1710rtx
502b8322 1711operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1712{
ddef6bc7 1713 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1714
1715 if (result)
1716 return result;
1717
1718 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1719 {
1720 /* If this is a register which can not be accessed by words, copy it
1721 to a pseudo register. */
1722 if (GET_CODE (op) == REG)
1723 op = copy_to_reg (op);
1724 else
1725 op = force_reg (mode, op);
1726 }
23b2ce53 1727
ddef6bc7 1728 result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1729 if (result == 0)
1730 abort ();
1731
1732 return result;
1733}
1734\f
1735/* Given a compare instruction, swap the operands.
1736 A test instruction is changed into a compare of 0 against the operand. */
1737
1738void
502b8322 1739reverse_comparison (rtx insn)
23b2ce53
RS
1740{
1741 rtx body = PATTERN (insn);
1742 rtx comp;
1743
1744 if (GET_CODE (body) == SET)
1745 comp = SET_SRC (body);
1746 else
1747 comp = SET_SRC (XVECEXP (body, 0, 0));
1748
1749 if (GET_CODE (comp) == COMPARE)
1750 {
1751 rtx op0 = XEXP (comp, 0);
1752 rtx op1 = XEXP (comp, 1);
1753 XEXP (comp, 0) = op1;
1754 XEXP (comp, 1) = op0;
1755 }
1756 else
1757 {
c5c76735
JL
1758 rtx new = gen_rtx_COMPARE (VOIDmode,
1759 CONST0_RTX (GET_MODE (comp)), comp);
23b2ce53
RS
1760 if (GET_CODE (body) == SET)
1761 SET_SRC (body) = new;
1762 else
1763 SET_SRC (XVECEXP (body, 0, 0)) = new;
1764 }
1765}
1766\f
998d7deb
RH
1767/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1768 or (2) a component ref of something variable. Represent the later with
1769 a NULL expression. */
1770
1771static tree
502b8322 1772component_ref_for_mem_expr (tree ref)
998d7deb
RH
1773{
1774 tree inner = TREE_OPERAND (ref, 0);
1775
1776 if (TREE_CODE (inner) == COMPONENT_REF)
1777 inner = component_ref_for_mem_expr (inner);
c56e3582
RK
1778 else
1779 {
1780 tree placeholder_ptr = 0;
1781
1782 /* Now remove any conversions: they don't change what the underlying
1783 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1784 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1785 || TREE_CODE (inner) == NON_LVALUE_EXPR
1786 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1787 || TREE_CODE (inner) == SAVE_EXPR
1788 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
68252e27
KH
1789 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1790 inner = find_placeholder (inner, &placeholder_ptr);
1791 else
1792 inner = TREE_OPERAND (inner, 0);
c56e3582
RK
1793
1794 if (! DECL_P (inner))
1795 inner = NULL_TREE;
1796 }
998d7deb
RH
1797
1798 if (inner == TREE_OPERAND (ref, 0))
1799 return ref;
1800 else
c56e3582
RK
1801 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1802 TREE_OPERAND (ref, 1));
998d7deb 1803}
173b24b9
RK
1804
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  /* Nonzero once OFFSET has been derived from T, meaning BITPOS still
     needs to be folded into OFFSET (and SIZE) at the end.  */
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  /* Walk inward through nested ARRAY_REFs, accumulating the byte
	     offset of each index level into OFF_TREE.  */
	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree array = TREE_OPERAND (t2, 0);
	      tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	      tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	      tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of the
		 array element.  */
	      if (low_bound != 0 && ! integer_zerop (low_bound))
		index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound));

	      /* If the index has a self-referential type, pass it to a
		 WITH_RECORD_EXPR; if the component size is, pass our
		 component to one.  */
	      if (CONTAINS_PLACEHOLDER_P (index))
		index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
	      if (CONTAINS_PLACEHOLDER_P (unit_size))
		unit_size = build (WITH_RECORD_EXPR, sizetype,
				   unit_size, array);

	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    index,
					    unit_size)),
			       off_tree));
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  /* Alignment implied by the lowest set bit of the offset.  */
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t2);
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && TREE_CODE (t2) == INDIRECT_REF
		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
	    {
	      expr = t2;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && TREE_CODE (t) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
2034
/* Set the memory attributes of REF from T, as for
   set_mem_attributes_minus_bitpos, with no outstanding bit position.
   OBJECTP is nonzero if we are making a new object of this type.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
2040
a560d4d4
JH
/* Set the memory attributes of MEM from REG: take the expression and
   offset recorded for the register, keeping MEM's own alias set, size,
   alignment and mode.  */

void
set_mem_attrs_from_reg (rtx mem, rtx reg)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
		     GEN_INT (REG_OFFSET (reg)),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
2051
173b24b9
RK
/* Set the alias set of MEM to SET.  All other memory attributes are
   preserved.  */

void
set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
    abort ();
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
738cc472 2067
/* Set the alignment of MEM to ALIGN bits.  All other memory attributes
   are preserved.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   GET_MODE (mem));
}
1285011e 2077
/* Set the expr for MEM to EXPR.  All other memory attributes are
   preserved.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
998d7deb
RH
2087
/* Set the offset of MEM to OFFSET.  All other memory attributes are
   preserved.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   GET_MODE (mem));
}
2097
/* Set the size of MEM to SIZE.  All other memory attributes are
   preserved.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
				   GET_MODE (mem));
}
173b24b9 2107\f
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  rtx new;

  if (GET_CODE (memref) != MEM)
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	{
	  /* NOTE(review): during/after reload the address must already be
	     valid as-is -- presumably because memory_address could create
	     new pseudos, which is not allowed here.  Confirm.  */
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  /* Avoid allocating a fresh MEM when nothing would actually change.  */
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
792760b9 2144
738cc472
RK
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new);

  /* Expr and offset are dropped; size and alignment are reset to what
     the new mode alone implies (unknown for BLKmode).  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
		     (mmode == BLKmode ? BITS_PER_UNIT
		      : GET_MODE_ALIGNMENT (mmode)),
		     mmode);

  return new;
}
792760b9 2163
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
2227
630036c6
JJ
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  /* Install the new address first, then let adjust_address_1 update
     only the attributes (ADJUST == 0 leaves the address alone).  */
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
2240
8ac61af7
RK
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Force the whole PIC base expression into a register and retry,
	 rather than letting memory_address rearrange it.  */
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     GET_MODE (new));
  return new;
}
68252e27 2278
792760b9
RK
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}
738cc472 2292
f1ec5147
RK
/* Likewise, but the reference is not required to be valid: the address is
   installed as-is with no validation (VALIDATE == 0).  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
e7dfe4bb
RH
2300
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  /* Walk outward through the MEM_EXPR until we find a field or decl
     large enough to contain the widened access, zapping EXPR if we
     run off the end.  */
  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Step out to the containing record, folding the field's byte
	     position into the recorded offset.  */
	  expr = TREE_OPERAND (expr, 0);
	  memoffset = (GEN_INT (INTVAL (memoffset)
		       + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		       + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			  / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				   MEM_ALIGN (new), mode);

  return new;
}
23b2ce53
RS
2377\f
/* Return a newly created CODE_LABEL rtx with a unique label number.
   Uniqueness comes from post-incrementing the file-scope counter
   label_num.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
2386\f
2387/* For procedure integration. */
2388
23b2ce53 2389/* Install new pointers to the first and last insns in the chain.
86fe05e0 2390 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2391 Used for an inline-procedure after copying the insn chain. */
2392
2393void
502b8322 2394set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2395{
86fe05e0
RK
2396 rtx insn;
2397
23b2ce53
RS
2398 first_insn = first;
2399 last_insn = last;
86fe05e0
RK
2400 cur_insn_uid = 0;
2401
2402 for (insn = first; insn; insn = NEXT_INSN (insn))
2403 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2404
2405 cur_insn_uid++;
23b2ce53
RS
2406}
2407
/* Set the range of label numbers found in the current function.
   This is used when belatedly compiling an inline function.
   base_label_num records the counter value at the time of the call.  */

void
set_new_first_and_last_label_num (int first, int last)
{
  base_label_num = label_num;
  first_label_num = first;
  last_label_num = last;
}
49ad7cfa
BS
2418
/* Set the last label number found in the current function.
   This is used when belatedly compiling an inline function.
   Like set_new_first_and_last_label_num, but leaves first_label_num
   untouched.  */

void
set_new_last_label_num (int last)
{
  base_label_num = label_num;
  last_label_num = last;
}
49ad7cfa 2428\f
23b2ce53
RS
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  P is unused here: resetting
   last_label_num is all that remains to be done.  */

void
restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
{
  last_label_num = 0;
}
2437\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  FNDECL is the
   function being compiled; INSN is the head of its insn chain.  */

void
unshare_all_rtl (tree fndecl, rtx insn)
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
2465
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  /* First clear the used flags on everything reachable, so that
     copy_rtx_if_shared starts from a clean slate.  */
  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl (cfun->decl, insn);
}
2495
2c07f13b
JH
/* Check that ORIG is not marked when it should not be, and mark ORIG as
   in use.  Recursively does the same for subexpressions.  INSN is the
   insn containing ORIG, used only for the diagnostic dump.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     report the invalid sharing and abort.  */

  if (RTX_FLAG (x, used))
    {
      error ("Invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("Shared rtx");
      debug_rtx (x);
      abort ();
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
2597
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  /* First pass: clear the used flags on everything reachable.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	reset_used_flags (LOG_LINKS (p));
      }

  /* Second pass: mark everything; verify_rtx_sharing aborts on anything
     it finds already marked.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	verify_rtx_sharing (PATTERN (p), p);
	verify_rtx_sharing (REG_NOTES (p), p);
	verify_rtx_sharing (LOG_LINKS (p), p);
      }
}
2622
d1b81779
GK
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
      }
}
2637
5c6df058
AO
/* Go through all virtual stack slots of a function and copy any
   shared structure.  BLK is a BLOCK node; its sub-blocks are
   processed recursively.  */
static void
unshare_all_decls (tree blk)
{
  tree t;

  /* Copy shared decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    unshare_all_decls (t);
}
2654
/* Go through all virtual stack slots of a function and mark them as
   not shared.  BLK is a BLOCK node; its sub-blocks are processed
   recursively.  */
static void
reset_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      reset_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    reset_used_decls (t);
}
2671
/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
   placed in the result directly, rather than being copied.  MAY_SHARE is
   either a MEM or an EXPR_LIST of MEMs.  */

rtx
copy_most_rtx (rtx orig, rtx may_share)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  /* ORIG itself (or a member of the MAY_SHARE list) is shared as-is.  */
  if (orig == may_share
      || (GET_CODE (may_share) == EXPR_LIST
	  && in_expr_list_p (may_share, orig)))
    return orig;

  code = GET_CODE (orig);

  /* These codes are always freely shared, never copied.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
  RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
  RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
	  break;

	case 'u':
	  /* 'u' operands (insn/label references) are shared, not copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'n':
	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
2771
23b2ce53
RS
2772/* Mark ORIG as in use, and return a copy of it if it was already in use.
2773 Recursively does the same for subexpressions. */
2774
2775rtx
502b8322 2776copy_rtx_if_shared (rtx orig)
23b2ce53 2777{
b3694847
SS
2778 rtx x = orig;
2779 int i;
2780 enum rtx_code code;
2781 const char *format_ptr;
23b2ce53
RS
2782 int copied = 0;
2783
2784 if (x == 0)
2785 return 0;
2786
2787 code = GET_CODE (x);
2788
2789 /* These types may be freely shared. */
2790
2791 switch (code)
2792 {
2793 case REG:
2794 case QUEUED:
2795 case CONST_INT:
2796 case CONST_DOUBLE:
69ef87e2 2797 case CONST_VECTOR:
23b2ce53 2798 case SYMBOL_REF:
2c07f13b 2799 case LABEL_REF:
23b2ce53
RS
2800 case CODE_LABEL:
2801 case PC:
2802 case CC0:
2803 case SCRATCH:
0f41302f 2804 /* SCRATCH must be shared because they represent distinct values. */
23b2ce53
RS
2805 return x;
2806
b851ea09
RK
2807 case CONST:
2808 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2809 a LABEL_REF, it isn't sharable. */
2810 if (GET_CODE (XEXP (x, 0)) == PLUS
2811 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2812 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2813 return x;
2814 break;
2815
23b2ce53
RS
2816 case INSN:
2817 case JUMP_INSN:
2818 case CALL_INSN:
2819 case NOTE:
23b2ce53
RS
2820 case BARRIER:
2821 /* The chain of insns is not being copied. */
2822 return x;
2823
e9a25f70
JL
2824 default:
2825 break;
23b2ce53
RS
2826 }
2827
2828 /* This rtx may not be shared. If it has already been seen,
2829 replace it with a copy of itself. */
2830
2adc7f12 2831 if (RTX_FLAG (x, used))
23b2ce53 2832 {
b3694847 2833 rtx copy;
23b2ce53
RS
2834
2835 copy = rtx_alloc (code);
e1de1560 2836 memcpy (copy, x, RTX_SIZE (code));
23b2ce53
RS
2837 x = copy;
2838 copied = 1;
2839 }
2adc7f12 2840 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2841
2842 /* Now scan the subexpressions recursively.
2843 We can store any replaced subexpressions directly into X
2844 since we know X is not shared! Any vectors in X
2845 must be copied if X was copied. */
2846
2847 format_ptr = GET_RTX_FORMAT (code);
2848
2849 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2850 {
2851 switch (*format_ptr++)
2852 {
2853 case 'e':
2854 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2855 break;
2856
2857 case 'E':
2858 if (XVEC (x, i) != NULL)
2859 {
b3694847 2860 int j;
f0722107 2861 int len = XVECLEN (x, i);
23b2ce53 2862
f0722107 2863 if (copied && len > 0)
8f985ec4 2864 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
f0722107
RS
2865 for (j = 0; j < len; j++)
2866 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
23b2ce53
RS
2867 }
2868 break;
2869 }
2870 }
2871 return x;
2872}
2873
2874/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2875 to look for shared sub-parts. */
2876
2877void
502b8322 2878reset_used_flags (rtx x)
23b2ce53 2879{
b3694847
SS
2880 int i, j;
2881 enum rtx_code code;
2882 const char *format_ptr;
23b2ce53
RS
2883
2884 if (x == 0)
2885 return;
2886
2887 code = GET_CODE (x);
2888
9faa82d8 2889 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2890 for them. */
2891
2892 switch (code)
2893 {
2894 case REG:
2895 case QUEUED:
2896 case CONST_INT:
2897 case CONST_DOUBLE:
69ef87e2 2898 case CONST_VECTOR:
23b2ce53
RS
2899 case SYMBOL_REF:
2900 case CODE_LABEL:
2901 case PC:
2902 case CC0:
2903 return;
2904
2905 case INSN:
2906 case JUMP_INSN:
2907 case CALL_INSN:
2908 case NOTE:
2909 case LABEL_REF:
2910 case BARRIER:
2911 /* The chain of insns is not being copied. */
2912 return;
750c9258 2913
e9a25f70
JL
2914 default:
2915 break;
23b2ce53
RS
2916 }
2917
2adc7f12 2918 RTX_FLAG (x, used) = 0;
23b2ce53
RS
2919
2920 format_ptr = GET_RTX_FORMAT (code);
2921 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2922 {
2923 switch (*format_ptr++)
2924 {
2925 case 'e':
2926 reset_used_flags (XEXP (x, i));
2927 break;
2928
2929 case 'E':
2930 for (j = 0; j < XVECLEN (x, i); j++)
2931 reset_used_flags (XVECEXP (x, i, j));
2932 break;
2933 }
2934 }
2935}
2c07f13b
JH
2936
2937/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2938 to look for shared sub-parts. */
2939
2940void
2941set_used_flags (rtx x)
2942{
2943 int i, j;
2944 enum rtx_code code;
2945 const char *format_ptr;
2946
2947 if (x == 0)
2948 return;
2949
2950 code = GET_CODE (x);
2951
2952 /* These types may be freely shared so we needn't do any resetting
2953 for them. */
2954
2955 switch (code)
2956 {
2957 case REG:
2958 case QUEUED:
2959 case CONST_INT:
2960 case CONST_DOUBLE:
2961 case CONST_VECTOR:
2962 case SYMBOL_REF:
2963 case CODE_LABEL:
2964 case PC:
2965 case CC0:
2966 return;
2967
2968 case INSN:
2969 case JUMP_INSN:
2970 case CALL_INSN:
2971 case NOTE:
2972 case LABEL_REF:
2973 case BARRIER:
2974 /* The chain of insns is not being copied. */
2975 return;
2976
2977 default:
2978 break;
2979 }
2980
2981 RTX_FLAG (x, used) = 1;
2982
2983 format_ptr = GET_RTX_FORMAT (code);
2984 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2985 {
2986 switch (*format_ptr++)
2987 {
2988 case 'e':
2989 set_used_flags (XEXP (x, i));
2990 break;
2991
2992 case 'E':
2993 for (j = 0; j < XVECLEN (x, i); j++)
2994 set_used_flags (XVECEXP (x, i, j));
2995 break;
2996 }
2997 }
2998}
23b2ce53
RS
2999\f
3000/* Copy X if necessary so that it won't be altered by changes in OTHER.
3001 Return X or the rtx for the pseudo reg the value of X was copied into.
3002 OTHER must be valid as a SET_DEST. */
3003
3004rtx
502b8322 3005make_safe_from (rtx x, rtx other)
23b2ce53
RS
3006{
3007 while (1)
3008 switch (GET_CODE (other))
3009 {
3010 case SUBREG:
3011 other = SUBREG_REG (other);
3012 break;
3013 case STRICT_LOW_PART:
3014 case SIGN_EXTEND:
3015 case ZERO_EXTEND:
3016 other = XEXP (other, 0);
3017 break;
3018 default:
3019 goto done;
3020 }
3021 done:
3022 if ((GET_CODE (other) == MEM
3023 && ! CONSTANT_P (x)
3024 && GET_CODE (x) != REG
3025 && GET_CODE (x) != SUBREG)
3026 || (GET_CODE (other) == REG
3027 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3028 || reg_mentioned_p (other, x))))
3029 {
3030 rtx temp = gen_reg_rtx (GET_MODE (x));
3031 emit_move_insn (temp, x);
3032 return temp;
3033 }
3034 return x;
3035}
3036\f
3037/* Emission of insns (adding them to the doubly-linked list). */
3038
3039/* Return the first insn of the current sequence or current function. */
3040
3041rtx
502b8322 3042get_insns (void)
23b2ce53
RS
3043{
3044 return first_insn;
3045}
3046
3dec4024
JH
3047/* Specify a new insn as the first in the chain. */
3048
3049void
502b8322 3050set_first_insn (rtx insn)
3dec4024
JH
3051{
3052 if (PREV_INSN (insn) != 0)
3053 abort ();
3054 first_insn = insn;
3055}
3056
23b2ce53
RS
3057/* Return the last insn emitted in current sequence or current function. */
3058
3059rtx
502b8322 3060get_last_insn (void)
23b2ce53
RS
3061{
3062 return last_insn;
3063}
3064
3065/* Specify a new insn as the last in the chain. */
3066
3067void
502b8322 3068set_last_insn (rtx insn)
23b2ce53
RS
3069{
3070 if (NEXT_INSN (insn) != 0)
3071 abort ();
3072 last_insn = insn;
3073}
3074
3075/* Return the last insn emitted, even if it is in a sequence now pushed. */
3076
3077rtx
502b8322 3078get_last_insn_anywhere (void)
23b2ce53
RS
3079{
3080 struct sequence_stack *stack;
3081 if (last_insn)
3082 return last_insn;
49ad7cfa 3083 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
3084 if (stack->last != 0)
3085 return stack->last;
3086 return 0;
3087}
3088
2a496e8b
JDA
3089/* Return the first nonnote insn emitted in current sequence or current
3090 function. This routine looks inside SEQUENCEs. */
3091
3092rtx
502b8322 3093get_first_nonnote_insn (void)
2a496e8b
JDA
3094{
3095 rtx insn = first_insn;
3096
3097 while (insn)
3098 {
3099 insn = next_insn (insn);
3100 if (insn == 0 || GET_CODE (insn) != NOTE)
3101 break;
3102 }
3103
3104 return insn;
3105}
3106
3107/* Return the last nonnote insn emitted in current sequence or current
3108 function. This routine looks inside SEQUENCEs. */
3109
3110rtx
502b8322 3111get_last_nonnote_insn (void)
2a496e8b
JDA
3112{
3113 rtx insn = last_insn;
3114
3115 while (insn)
3116 {
3117 insn = previous_insn (insn);
3118 if (insn == 0 || GET_CODE (insn) != NOTE)
3119 break;
3120 }
3121
3122 return insn;
3123}
3124
23b2ce53
RS
3125/* Return a number larger than any instruction's uid in this function. */
3126
3127int
502b8322 3128get_max_uid (void)
23b2ce53
RS
3129{
3130 return cur_insn_uid;
3131}
aeeeda03 3132
673b5311
MM
3133/* Renumber instructions so that no instruction UIDs are wasted. */
3134
aeeeda03 3135void
502b8322 3136renumber_insns (FILE *stream)
aeeeda03
MM
3137{
3138 rtx insn;
aeeeda03 3139
673b5311
MM
3140 /* If we're not supposed to renumber instructions, don't. */
3141 if (!flag_renumber_insns)
3142 return;
3143
aeeeda03
MM
3144 /* If there aren't that many instructions, then it's not really
3145 worth renumbering them. */
673b5311 3146 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
aeeeda03
MM
3147 return;
3148
3149 cur_insn_uid = 1;
3150
3151 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
673b5311
MM
3152 {
3153 if (stream)
750c9258 3154 fprintf (stream, "Renumbering insn %d to %d\n",
673b5311
MM
3155 INSN_UID (insn), cur_insn_uid);
3156 INSN_UID (insn) = cur_insn_uid++;
3157 }
aeeeda03 3158}
23b2ce53
RS
3159\f
3160/* Return the next insn. If it is a SEQUENCE, return the first insn
3161 of the sequence. */
3162
3163rtx
502b8322 3164next_insn (rtx insn)
23b2ce53
RS
3165{
3166 if (insn)
3167 {
3168 insn = NEXT_INSN (insn);
3169 if (insn && GET_CODE (insn) == INSN
3170 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3171 insn = XVECEXP (PATTERN (insn), 0, 0);
3172 }
3173
3174 return insn;
3175}
3176
3177/* Return the previous insn. If it is a SEQUENCE, return the last insn
3178 of the sequence. */
3179
3180rtx
502b8322 3181previous_insn (rtx insn)
23b2ce53
RS
3182{
3183 if (insn)
3184 {
3185 insn = PREV_INSN (insn);
3186 if (insn && GET_CODE (insn) == INSN
3187 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3188 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3189 }
3190
3191 return insn;
3192}
3193
3194/* Return the next insn after INSN that is not a NOTE. This routine does not
3195 look inside SEQUENCEs. */
3196
3197rtx
502b8322 3198next_nonnote_insn (rtx insn)
23b2ce53
RS
3199{
3200 while (insn)
3201 {
3202 insn = NEXT_INSN (insn);
3203 if (insn == 0 || GET_CODE (insn) != NOTE)
3204 break;
3205 }
3206
3207 return insn;
3208}
3209
3210/* Return the previous insn before INSN that is not a NOTE. This routine does
3211 not look inside SEQUENCEs. */
3212
3213rtx
502b8322 3214prev_nonnote_insn (rtx insn)
23b2ce53
RS
3215{
3216 while (insn)
3217 {
3218 insn = PREV_INSN (insn);
3219 if (insn == 0 || GET_CODE (insn) != NOTE)
3220 break;
3221 }
3222
3223 return insn;
3224}
3225
3226/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3227 or 0, if there is none. This routine does not look inside
0f41302f 3228 SEQUENCEs. */
23b2ce53
RS
3229
3230rtx
502b8322 3231next_real_insn (rtx insn)
23b2ce53
RS
3232{
3233 while (insn)
3234 {
3235 insn = NEXT_INSN (insn);
3236 if (insn == 0 || GET_CODE (insn) == INSN
3237 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3238 break;
3239 }
3240
3241 return insn;
3242}
3243
3244/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3245 or 0, if there is none. This routine does not look inside
3246 SEQUENCEs. */
3247
3248rtx
502b8322 3249prev_real_insn (rtx insn)
23b2ce53
RS
3250{
3251 while (insn)
3252 {
3253 insn = PREV_INSN (insn);
3254 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3255 || GET_CODE (insn) == JUMP_INSN)
3256 break;
3257 }
3258
3259 return insn;
3260}
3261
ee960939
OH
3262/* Return the last CALL_INSN in the current list, or 0 if there is none.
3263 This routine does not look inside SEQUENCEs. */
3264
3265rtx
502b8322 3266last_call_insn (void)
ee960939
OH
3267{
3268 rtx insn;
3269
3270 for (insn = get_last_insn ();
3271 insn && GET_CODE (insn) != CALL_INSN;
3272 insn = PREV_INSN (insn))
3273 ;
3274
3275 return insn;
3276}
3277
23b2ce53
RS
3278/* Find the next insn after INSN that really does something. This routine
3279 does not look inside SEQUENCEs. Until reload has completed, this is the
3280 same as next_real_insn. */
3281
69732dcb 3282int
502b8322 3283active_insn_p (rtx insn)
69732dcb 3284{
23b8ba81
RH
3285 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3286 || (GET_CODE (insn) == INSN
3287 && (! reload_completed
3288 || (GET_CODE (PATTERN (insn)) != USE
3289 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
3290}
3291
23b2ce53 3292rtx
502b8322 3293next_active_insn (rtx insn)
23b2ce53
RS
3294{
3295 while (insn)
3296 {
3297 insn = NEXT_INSN (insn);
69732dcb 3298 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
3299 break;
3300 }
3301
3302 return insn;
3303}
3304
3305/* Find the last insn before INSN that really does something. This routine
3306 does not look inside SEQUENCEs. Until reload has completed, this is the
3307 same as prev_real_insn. */
3308
3309rtx
502b8322 3310prev_active_insn (rtx insn)
23b2ce53
RS
3311{
3312 while (insn)
3313 {
3314 insn = PREV_INSN (insn);
69732dcb 3315 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
3316 break;
3317 }
3318
3319 return insn;
3320}
3321
3322/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3323
3324rtx
502b8322 3325next_label (rtx insn)
23b2ce53
RS
3326{
3327 while (insn)
3328 {
3329 insn = NEXT_INSN (insn);
3330 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3331 break;
3332 }
3333
3334 return insn;
3335}
3336
3337/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3338
3339rtx
502b8322 3340prev_label (rtx insn)
23b2ce53
RS
3341{
3342 while (insn)
3343 {
3344 insn = PREV_INSN (insn);
3345 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3346 break;
3347 }
3348
3349 return insn;
3350}
3351\f
3352#ifdef HAVE_cc0
c572e5ba
JVA
3353/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3354 and REG_CC_USER notes so we can find it. */
3355
3356void
502b8322 3357link_cc0_insns (rtx insn)
c572e5ba
JVA
3358{
3359 rtx user = next_nonnote_insn (insn);
3360
3361 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3362 user = XVECEXP (PATTERN (user), 0, 0);
3363
c5c76735
JL
3364 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3365 REG_NOTES (user));
3b80f6ca 3366 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
c572e5ba
JVA
3367}
3368
23b2ce53
RS
3369/* Return the next insn that uses CC0 after INSN, which is assumed to
3370 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3371 applied to the result of this function should yield INSN).
3372
3373 Normally, this is simply the next insn. However, if a REG_CC_USER note
3374 is present, it contains the insn that uses CC0.
3375
3376 Return 0 if we can't find the insn. */
3377
3378rtx
502b8322 3379next_cc0_user (rtx insn)
23b2ce53 3380{
906c4e36 3381 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3382
3383 if (note)
3384 return XEXP (note, 0);
3385
3386 insn = next_nonnote_insn (insn);
3387 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3388 insn = XVECEXP (PATTERN (insn), 0, 0);
3389
2c3c49de 3390 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3391 return insn;
3392
3393 return 0;
3394}
3395
3396/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3397 note, it is the previous insn. */
3398
3399rtx
502b8322 3400prev_cc0_setter (rtx insn)
23b2ce53 3401{
906c4e36 3402 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3403
3404 if (note)
3405 return XEXP (note, 0);
3406
3407 insn = prev_nonnote_insn (insn);
3408 if (! sets_cc0_p (PATTERN (insn)))
3409 abort ();
3410
3411 return insn;
3412}
3413#endif
e5bef2e4
HB
3414
3415/* Increment the label uses for all labels present in rtx. */
3416
3417static void
502b8322 3418mark_label_nuses (rtx x)
e5bef2e4 3419{
b3694847
SS
3420 enum rtx_code code;
3421 int i, j;
3422 const char *fmt;
e5bef2e4
HB
3423
3424 code = GET_CODE (x);
3425 if (code == LABEL_REF)
3426 LABEL_NUSES (XEXP (x, 0))++;
3427
3428 fmt = GET_RTX_FORMAT (code);
3429 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3430 {
3431 if (fmt[i] == 'e')
0fb7aeda 3432 mark_label_nuses (XEXP (x, i));
e5bef2e4 3433 else if (fmt[i] == 'E')
0fb7aeda 3434 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3435 mark_label_nuses (XVECEXP (x, i, j));
3436 }
3437}
3438
23b2ce53
RS
3439\f
3440/* Try splitting insns that can be split for better scheduling.
3441 PAT is the pattern which might split.
3442 TRIAL is the insn providing PAT.
cc2902df 3443 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3444
3445 If this routine succeeds in splitting, it returns the first or last
11147ebe 3446 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3447 returns TRIAL. If the insn to be returned can be split, it will be. */
3448
3449rtx
502b8322 3450try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3451{
3452 rtx before = PREV_INSN (trial);
3453 rtx after = NEXT_INSN (trial);
23b2ce53
RS
3454 int has_barrier = 0;
3455 rtx tem;
6b24c259
JH
3456 rtx note, seq;
3457 int probability;
599aedd9
RH
3458 rtx insn_last, insn;
3459 int njumps = 0;
6b24c259
JH
3460
3461 if (any_condjump_p (trial)
3462 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3463 split_branch_probability = INTVAL (XEXP (note, 0));
3464 probability = split_branch_probability;
3465
3466 seq = split_insns (pat, trial);
3467
3468 split_branch_probability = -1;
23b2ce53
RS
3469
3470 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3471 We may need to handle this specially. */
3472 if (after && GET_CODE (after) == BARRIER)
3473 {
3474 has_barrier = 1;
3475 after = NEXT_INSN (after);
3476 }
3477
599aedd9
RH
3478 if (!seq)
3479 return trial;
3480
3481 /* Avoid infinite loop if any insn of the result matches
3482 the original pattern. */
3483 insn_last = seq;
3484 while (1)
23b2ce53 3485 {
599aedd9
RH
3486 if (INSN_P (insn_last)
3487 && rtx_equal_p (PATTERN (insn_last), pat))
3488 return trial;
3489 if (!NEXT_INSN (insn_last))
3490 break;
3491 insn_last = NEXT_INSN (insn_last);
3492 }
750c9258 3493
599aedd9
RH
3494 /* Mark labels. */
3495 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3496 {
3497 if (GET_CODE (insn) == JUMP_INSN)
3498 {
3499 mark_jump_label (PATTERN (insn), insn, 0);
3500 njumps++;
3501 if (probability != -1
3502 && any_condjump_p (insn)
3503 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3504 {
599aedd9
RH
3505 /* We can preserve the REG_BR_PROB notes only if exactly
3506 one jump is created, otherwise the machine description
3507 is responsible for this step using
3508 split_branch_probability variable. */
3509 if (njumps != 1)
3510 abort ();
3511 REG_NOTES (insn)
3512 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3513 GEN_INT (probability),
3514 REG_NOTES (insn));
2f937369 3515 }
599aedd9
RH
3516 }
3517 }
3518
3519 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3520 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3521 if (GET_CODE (trial) == CALL_INSN)
3522 {
3523 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3524 if (GET_CODE (insn) == CALL_INSN)
3525 {
f6a1f3f6
RH
3526 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3527 while (*p)
3528 p = &XEXP (*p, 1);
3529 *p = CALL_INSN_FUNCTION_USAGE (trial);
599aedd9
RH
3530 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3531 }
3532 }
4b5e8abe 3533
599aedd9
RH
3534 /* Copy notes, particularly those related to the CFG. */
3535 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3536 {
3537 switch (REG_NOTE_KIND (note))
3538 {
3539 case REG_EH_REGION:
2f937369
DM
3540 insn = insn_last;
3541 while (insn != NULL_RTX)
3542 {
599aedd9
RH
3543 if (GET_CODE (insn) == CALL_INSN
3544 || (flag_non_call_exceptions
3545 && may_trap_p (PATTERN (insn))))
3546 REG_NOTES (insn)
3547 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3548 XEXP (note, 0),
3549 REG_NOTES (insn));
2f937369
DM
3550 insn = PREV_INSN (insn);
3551 }
599aedd9 3552 break;
216183ce 3553
599aedd9
RH
3554 case REG_NORETURN:
3555 case REG_SETJMP:
3556 case REG_ALWAYS_RETURN:
3557 insn = insn_last;
3558 while (insn != NULL_RTX)
216183ce 3559 {
599aedd9
RH
3560 if (GET_CODE (insn) == CALL_INSN)
3561 REG_NOTES (insn)
3562 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3563 XEXP (note, 0),
3564 REG_NOTES (insn));
3565 insn = PREV_INSN (insn);
216183ce 3566 }
599aedd9 3567 break;
d6e95df8 3568
599aedd9
RH
3569 case REG_NON_LOCAL_GOTO:
3570 insn = insn_last;
3571 while (insn != NULL_RTX)
2f937369 3572 {
599aedd9
RH
3573 if (GET_CODE (insn) == JUMP_INSN)
3574 REG_NOTES (insn)
3575 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3576 XEXP (note, 0),
3577 REG_NOTES (insn));
3578 insn = PREV_INSN (insn);
2f937369 3579 }
599aedd9 3580 break;
e5bef2e4 3581
599aedd9
RH
3582 default:
3583 break;
23b2ce53 3584 }
599aedd9
RH
3585 }
3586
3587 /* If there are LABELS inside the split insns increment the
3588 usage count so we don't delete the label. */
3589 if (GET_CODE (trial) == INSN)
3590 {
3591 insn = insn_last;
3592 while (insn != NULL_RTX)
23b2ce53 3593 {
599aedd9
RH
3594 if (GET_CODE (insn) == INSN)
3595 mark_label_nuses (PATTERN (insn));
23b2ce53 3596
599aedd9
RH
3597 insn = PREV_INSN (insn);
3598 }
23b2ce53
RS
3599 }
3600
0435312e 3601 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3602
3603 delete_insn (trial);
3604 if (has_barrier)
3605 emit_barrier_after (tem);
3606
3607 /* Recursively call try_split for each new insn created; by the
3608 time control returns here that insn will be fully split, so
3609 set LAST and continue from the insn after the one returned.
3610 We can't use next_active_insn here since AFTER may be a note.
3611 Ignore deleted insns, which can be occur if not optimizing. */
3612 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3613 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3614 tem = try_split (PATTERN (tem), tem, 1);
3615
3616 /* Return either the first or the last insn, depending on which was
3617 requested. */
3618 return last
3619 ? (after ? PREV_INSN (after) : last_insn)
3620 : NEXT_INSN (before);
23b2ce53
RS
3621}
3622\f
3623/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3624 Store PATTERN in the pattern slots. */
23b2ce53
RS
3625
3626rtx
502b8322 3627make_insn_raw (rtx pattern)
23b2ce53 3628{
b3694847 3629 rtx insn;
23b2ce53 3630
1f8f4a0b 3631 insn = rtx_alloc (INSN);
23b2ce53 3632
43127294 3633 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3634 PATTERN (insn) = pattern;
3635 INSN_CODE (insn) = -1;
1632afca
RS
3636 LOG_LINKS (insn) = NULL;
3637 REG_NOTES (insn) = NULL;
0435312e 3638 INSN_LOCATOR (insn) = 0;
ba4f7968 3639 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3640
47984720
NC
3641#ifdef ENABLE_RTL_CHECKING
3642 if (insn
2c3c49de 3643 && INSN_P (insn)
47984720
NC
3644 && (returnjump_p (insn)
3645 || (GET_CODE (insn) == SET
3646 && SET_DEST (insn) == pc_rtx)))
3647 {
3648 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3649 debug_rtx (insn);
3650 }
3651#endif
750c9258 3652
23b2ce53
RS
3653 return insn;
3654}
3655
2f937369 3656/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53
RS
3657
3658static rtx
502b8322 3659make_jump_insn_raw (rtx pattern)
23b2ce53 3660{
b3694847 3661 rtx insn;
23b2ce53 3662
4b1f5e8c 3663 insn = rtx_alloc (JUMP_INSN);
1632afca 3664 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3665
3666 PATTERN (insn) = pattern;
3667 INSN_CODE (insn) = -1;
1632afca
RS
3668 LOG_LINKS (insn) = NULL;
3669 REG_NOTES (insn) = NULL;
3670 JUMP_LABEL (insn) = NULL;
0435312e 3671 INSN_LOCATOR (insn) = 0;
ba4f7968 3672 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53
RS
3673
3674 return insn;
3675}
aff507f4 3676
2f937369 3677/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
aff507f4
RK
3678
3679static rtx
502b8322 3680make_call_insn_raw (rtx pattern)
aff507f4 3681{
b3694847 3682 rtx insn;
aff507f4
RK
3683
3684 insn = rtx_alloc (CALL_INSN);
3685 INSN_UID (insn) = cur_insn_uid++;
3686
3687 PATTERN (insn) = pattern;
3688 INSN_CODE (insn) = -1;
3689 LOG_LINKS (insn) = NULL;
3690 REG_NOTES (insn) = NULL;
3691 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
0435312e 3692 INSN_LOCATOR (insn) = 0;
ba4f7968 3693 BLOCK_FOR_INSN (insn) = NULL;
aff507f4
RK
3694
3695 return insn;
3696}
23b2ce53
RS
3697\f
3698/* Add INSN to the end of the doubly-linked list.
3699 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3700
3701void
502b8322 3702add_insn (rtx insn)
23b2ce53
RS
3703{
3704 PREV_INSN (insn) = last_insn;
3705 NEXT_INSN (insn) = 0;
3706
3707 if (NULL != last_insn)
3708 NEXT_INSN (last_insn) = insn;
3709
3710 if (NULL == first_insn)
3711 first_insn = insn;
3712
3713 last_insn = insn;
3714}
3715
a0ae8e8d
RK
3716/* Add INSN into the doubly-linked list after insn AFTER. This and
3717 the next should be the only functions called to insert an insn once
ba213285 3718 delay slots have been filled since only they know how to update a
a0ae8e8d 3719 SEQUENCE. */
23b2ce53
RS
3720
3721void
502b8322 3722add_insn_after (rtx insn, rtx after)
23b2ce53
RS
3723{
3724 rtx next = NEXT_INSN (after);
3c030e88 3725 basic_block bb;
23b2ce53 3726
6782074d 3727 if (optimize && INSN_DELETED_P (after))
ba213285
RK
3728 abort ();
3729
23b2ce53
RS
3730 NEXT_INSN (insn) = next;
3731 PREV_INSN (insn) = after;
3732
3733 if (next)
3734 {
3735 PREV_INSN (next) = insn;
3736 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3737 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3738 }
3739 else if (last_insn == after)
3740 last_insn = insn;
3741 else
3742 {
49ad7cfa 3743 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3744 /* Scan all pending sequences too. */
3745 for (; stack; stack = stack->next)
3746 if (after == stack->last)
fef0509b
RK
3747 {
3748 stack->last = insn;
3749 break;
3750 }
a0ae8e8d
RK
3751
3752 if (stack == 0)
3753 abort ();
23b2ce53
RS
3754 }
3755
ba4f7968
JH
3756 if (GET_CODE (after) != BARRIER
3757 && GET_CODE (insn) != BARRIER
3c030e88
JH
3758 && (bb = BLOCK_FOR_INSN (after)))
3759 {
3760 set_block_for_insn (insn, bb);
38c1593d 3761 if (INSN_P (insn))
68252e27 3762 bb->flags |= BB_DIRTY;
3c030e88 3763 /* Should not happen as first in the BB is always
a1f300c0 3764 either NOTE or LABEL. */
3c030e88
JH
3765 if (bb->end == after
3766 /* Avoid clobbering of structure when creating new BB. */
3767 && GET_CODE (insn) != BARRIER
3768 && (GET_CODE (insn) != NOTE
3769 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3770 bb->end = insn;
3771 }
3772
23b2ce53
RS
3773 NEXT_INSN (after) = insn;
3774 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3775 {
3776 rtx sequence = PATTERN (after);
3777 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3778 }
3779}
3780
a0ae8e8d
RK
3781/* Add INSN into the doubly-linked list before insn BEFORE. This and
3782 the previous should be the only functions called to insert an insn once
ba213285 3783 delay slots have been filled since only they know how to update a
a0ae8e8d
RK
3784 SEQUENCE. */
3785
3786void
502b8322 3787add_insn_before (rtx insn, rtx before)
a0ae8e8d
RK
3788{
3789 rtx prev = PREV_INSN (before);
3c030e88 3790 basic_block bb;
a0ae8e8d 3791
6782074d 3792 if (optimize && INSN_DELETED_P (before))
ba213285
RK
3793 abort ();
3794
a0ae8e8d
RK
3795 PREV_INSN (insn) = prev;
3796 NEXT_INSN (insn) = before;
3797
3798 if (prev)
3799 {
3800 NEXT_INSN (prev) = insn;
3801 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3802 {
3803 rtx sequence = PATTERN (prev);
3804 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3805 }
3806 }
3807 else if (first_insn == before)
3808 first_insn = insn;
3809 else
3810 {
49ad7cfa 3811 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3812 /* Scan all pending sequences too. */
3813 for (; stack; stack = stack->next)
3814 if (before == stack->first)
fef0509b
RK
3815 {
3816 stack->first = insn;
3817 break;
3818 }
a0ae8e8d
RK
3819
3820 if (stack == 0)
3821 abort ();
3822 }
3823
ba4f7968
JH
3824 if (GET_CODE (before) != BARRIER
3825 && GET_CODE (insn) != BARRIER
3c030e88
JH
3826 && (bb = BLOCK_FOR_INSN (before)))
3827 {
3828 set_block_for_insn (insn, bb);
38c1593d 3829 if (INSN_P (insn))
68252e27 3830 bb->flags |= BB_DIRTY;
3c030e88 3831 /* Should not happen as first in the BB is always
a1f300c0 3832 either NOTE or LABEl. */
3c030e88
JH
3833 if (bb->head == insn
3834 /* Avoid clobbering of structure when creating new BB. */
3835 && GET_CODE (insn) != BARRIER
3836 && (GET_CODE (insn) != NOTE
3837 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3838 abort ();
3839 }
3840
a0ae8e8d
RK
3841 PREV_INSN (before) = insn;
3842 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3843 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3844}
3845
89e99eea
DB
3846/* Remove an insn from its doubly-linked list. This function knows how
3847 to handle sequences. */
3848void
502b8322 3849remove_insn (rtx insn)
89e99eea
DB
3850{
3851 rtx next = NEXT_INSN (insn);
3852 rtx prev = PREV_INSN (insn);
53c17031
JH
3853 basic_block bb;
3854
89e99eea
DB
3855 if (prev)
3856 {
3857 NEXT_INSN (prev) = next;
3858 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3859 {
3860 rtx sequence = PATTERN (prev);
3861 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3862 }
3863 }
3864 else if (first_insn == insn)
3865 first_insn = next;
3866 else
3867 {
49ad7cfa 3868 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3869 /* Scan all pending sequences too. */
3870 for (; stack; stack = stack->next)
3871 if (insn == stack->first)
3872 {
3873 stack->first = next;
3874 break;
3875 }
3876
3877 if (stack == 0)
3878 abort ();
3879 }
3880
3881 if (next)
3882 {
3883 PREV_INSN (next) = prev;
3884 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3885 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3886 }
3887 else if (last_insn == insn)
3888 last_insn = prev;
3889 else
3890 {
49ad7cfa 3891 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3892 /* Scan all pending sequences too. */
3893 for (; stack; stack = stack->next)
3894 if (insn == stack->last)
3895 {
3896 stack->last = prev;
3897 break;
3898 }
3899
3900 if (stack == 0)
3901 abort ();
3902 }
ba4f7968 3903 if (GET_CODE (insn) != BARRIER
53c17031
JH
3904 && (bb = BLOCK_FOR_INSN (insn)))
3905 {
38c1593d 3906 if (INSN_P (insn))
68252e27 3907 bb->flags |= BB_DIRTY;
53c17031
JH
3908 if (bb->head == insn)
3909 {
3bf1e984
RK
3910 /* Never ever delete the basic block note without deleting whole
3911 basic block. */
53c17031
JH
3912 if (GET_CODE (insn) == NOTE)
3913 abort ();
3914 bb->head = next;
3915 }
3916 if (bb->end == insn)
3917 bb->end = prev;
3918 }
89e99eea
DB
3919}
3920
ee960939
OH
3921/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3922
3923void
502b8322 3924add_function_usage_to (rtx call_insn, rtx call_fusage)
ee960939
OH
3925{
3926 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3927 abort ();
3928
3929 /* Put the register usage information on the CALL. If there is already
3930 some usage information, put ours at the end. */
3931 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3932 {
3933 rtx link;
3934
3935 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3936 link = XEXP (link, 1))
3937 ;
3938
3939 XEXP (link, 1) = call_fusage;
3940 }
3941 else
3942 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3943}
3944
23b2ce53
RS
3945/* Delete all insns made since FROM.
3946 FROM becomes the new last instruction. */
3947
3948void
502b8322 3949delete_insns_since (rtx from)
23b2ce53
RS
3950{
3951 if (from == 0)
3952 first_insn = 0;
3953 else
3954 NEXT_INSN (from) = 0;
3955 last_insn = from;
3956}
3957
5dab5552
MS
3958/* This function is deprecated, please use sequences instead.
3959
3960 Move a consecutive bunch of insns to a different place in the chain.
23b2ce53
RS
3961 The insns to be moved are those between FROM and TO.
3962 They are moved to a new position after the insn AFTER.
3963 AFTER must not be FROM or TO or any insn in between.
3964
3965 This function does not know about SEQUENCEs and hence should not be
3966 called after delay-slot filling has been done. */
3967
3968void
502b8322 3969reorder_insns_nobb (rtx from, rtx to, rtx after)
23b2ce53
RS
3970{
3971 /* Splice this bunch out of where it is now. */
3972 if (PREV_INSN (from))
3973 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3974 if (NEXT_INSN (to))
3975 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3976 if (last_insn == to)
3977 last_insn = PREV_INSN (from);
3978 if (first_insn == from)
3979 first_insn = NEXT_INSN (to);
3980
3981 /* Make the new neighbors point to it and it to them. */
3982 if (NEXT_INSN (after))
3983 PREV_INSN (NEXT_INSN (after)) = to;
3984
3985 NEXT_INSN (to) = NEXT_INSN (after);
3986 PREV_INSN (from) = after;
3987 NEXT_INSN (after) = from;
3988 if (after == last_insn)
3989 last_insn = to;
3990}
3991
3c030e88
JH
3992/* Same as function above, but take care to update BB boundaries. */
3993void
502b8322 3994reorder_insns (rtx from, rtx to, rtx after)
3c030e88
JH
3995{
3996 rtx prev = PREV_INSN (from);
3997 basic_block bb, bb2;
3998
3999 reorder_insns_nobb (from, to, after);
4000
ba4f7968 4001 if (GET_CODE (after) != BARRIER
3c030e88
JH
4002 && (bb = BLOCK_FOR_INSN (after)))
4003 {
4004 rtx x;
38c1593d 4005 bb->flags |= BB_DIRTY;
68252e27 4006
ba4f7968 4007 if (GET_CODE (from) != BARRIER
3c030e88
JH
4008 && (bb2 = BLOCK_FOR_INSN (from)))
4009 {
4010 if (bb2->end == to)
4011 bb2->end = prev;
38c1593d 4012 bb2->flags |= BB_DIRTY;
3c030e88
JH
4013 }
4014
4015 if (bb->end == after)
4016 bb->end = to;
4017
4018 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4019 set_block_for_insn (x, bb);
4020 }
4021}
4022
23b2ce53
RS
4023/* Return the line note insn preceding INSN. */
4024
4025static rtx
502b8322 4026find_line_note (rtx insn)
23b2ce53
RS
4027{
4028 if (no_line_numbers)
4029 return 0;
4030
4031 for (; insn; insn = PREV_INSN (insn))
4032 if (GET_CODE (insn) == NOTE
0fb7aeda 4033 && NOTE_LINE_NUMBER (insn) >= 0)
23b2ce53
RS
4034 break;
4035
4036 return insn;
4037}
4038
4039/* Like reorder_insns, but inserts line notes to preserve the line numbers
4040 of the moved insns when debugging. This may insert a note between AFTER
4041 and FROM, and another one after TO. */
4042
4043void
502b8322 4044reorder_insns_with_line_notes (rtx from, rtx to, rtx after)
23b2ce53
RS
4045{
4046 rtx from_line = find_line_note (from);
4047 rtx after_line = find_line_note (after);
4048
4049 reorder_insns (from, to, after);
4050
4051 if (from_line == after_line)
4052 return;
4053
4054 if (from_line)
5f2fc772 4055 emit_note_copy_after (from_line, after);
23b2ce53 4056 if (after_line)
5f2fc772 4057 emit_note_copy_after (after_line, to);
23b2ce53 4058}
aeeeda03 4059
64b59a80 4060/* Remove unnecessary notes from the instruction stream. */
aeeeda03
MM
4061
4062void
502b8322 4063remove_unnecessary_notes (void)
aeeeda03 4064{
542d73ae
RH
4065 rtx block_stack = NULL_RTX;
4066 rtx eh_stack = NULL_RTX;
aeeeda03
MM
4067 rtx insn;
4068 rtx next;
542d73ae 4069 rtx tmp;
aeeeda03 4070
116eebd6
MM
4071 /* We must not remove the first instruction in the function because
4072 the compiler depends on the first instruction being a note. */
aeeeda03
MM
4073 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4074 {
4075 /* Remember what's next. */
4076 next = NEXT_INSN (insn);
4077
4078 /* We're only interested in notes. */
4079 if (GET_CODE (insn) != NOTE)
4080 continue;
4081
542d73ae 4082 switch (NOTE_LINE_NUMBER (insn))
18c038b9 4083 {
542d73ae 4084 case NOTE_INSN_DELETED:
e803a64b 4085 case NOTE_INSN_LOOP_END_TOP_COND:
542d73ae
RH
4086 remove_insn (insn);
4087 break;
4088
4089 case NOTE_INSN_EH_REGION_BEG:
4090 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4091 break;
4092
4093 case NOTE_INSN_EH_REGION_END:
4094 /* Too many end notes. */
4095 if (eh_stack == NULL_RTX)
4096 abort ();
4097 /* Mismatched nesting. */
4098 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4099 abort ();
4100 tmp = eh_stack;
4101 eh_stack = XEXP (eh_stack, 1);
4102 free_INSN_LIST_node (tmp);
4103 break;
4104
4105 case NOTE_INSN_BLOCK_BEG:
4106 /* By now, all notes indicating lexical blocks should have
4107 NOTE_BLOCK filled in. */
4108 if (NOTE_BLOCK (insn) == NULL_TREE)
4109 abort ();
4110 block_stack = alloc_INSN_LIST (insn, block_stack);
4111 break;
4112
4113 case NOTE_INSN_BLOCK_END:
4114 /* Too many end notes. */
4115 if (block_stack == NULL_RTX)
4116 abort ();
4117 /* Mismatched nesting. */
4118 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4119 abort ();
4120 tmp = block_stack;
4121 block_stack = XEXP (block_stack, 1);
4122 free_INSN_LIST_node (tmp);
4123
18c038b9
MM
4124 /* Scan back to see if there are any non-note instructions
4125 between INSN and the beginning of this block. If not,
4126 then there is no PC range in the generated code that will
4127 actually be in this block, so there's no point in
4128 remembering the existence of the block. */
68252e27 4129 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
18c038b9
MM
4130 {
4131 /* This block contains a real instruction. Note that we
4132 don't include labels; if the only thing in the block
4133 is a label, then there are still no PC values that
4134 lie within the block. */
542d73ae 4135 if (INSN_P (tmp))
18c038b9
MM
4136 break;
4137
4138 /* We're only interested in NOTEs. */
542d73ae 4139 if (GET_CODE (tmp) != NOTE)
18c038b9
MM
4140 continue;
4141
542d73ae 4142 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
18c038b9 4143 {
e1772ac0
NB
4144 /* We just verified that this BLOCK matches us with
4145 the block_stack check above. Never delete the
4146 BLOCK for the outermost scope of the function; we
4147 can refer to names from that scope even if the
4148 block notes are messed up. */
4149 if (! is_body_block (NOTE_BLOCK (insn))
4150 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
deb5e280 4151 {
542d73ae 4152 remove_insn (tmp);
deb5e280
JM
4153 remove_insn (insn);
4154 }
18c038b9
MM
4155 break;
4156 }
542d73ae 4157 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
18c038b9
MM
4158 /* There's a nested block. We need to leave the
4159 current block in place since otherwise the debugger
4160 wouldn't be able to show symbols from our block in
4161 the nested block. */
4162 break;
4163 }
4164 }
aeeeda03 4165 }
542d73ae
RH
4166
4167 /* Too many begin notes. */
4168 if (block_stack || eh_stack)
4169 abort ();
aeeeda03
MM
4170}
4171
23b2ce53 4172\f
2f937369
DM
4173/* Emit insn(s) of given code and pattern
4174 at a specified place within the doubly-linked list.
23b2ce53 4175
2f937369
DM
4176 All of the emit_foo global entry points accept an object
4177 X which is either an insn list or a PATTERN of a single
4178 instruction.
23b2ce53 4179
2f937369
DM
4180 There are thus a few canonical ways to generate code and
4181 emit it at a specific place in the instruction stream. For
4182 example, consider the instruction named SPOT and the fact that
4183 we would like to emit some instructions before SPOT. We might
4184 do it like this:
23b2ce53 4185
2f937369
DM
4186 start_sequence ();
4187 ... emit the new instructions ...
4188 insns_head = get_insns ();
4189 end_sequence ();
23b2ce53 4190
2f937369 4191 emit_insn_before (insns_head, SPOT);
23b2ce53 4192
2f937369
DM
4193 It used to be common to generate SEQUENCE rtl instead, but that
4194 is a relic of the past which no longer occurs. The reason is that
4195 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4196 generated would almost certainly die right after it was created. */
23b2ce53 4197
2f937369 4198/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
4199
4200rtx
502b8322 4201emit_insn_before (rtx x, rtx before)
23b2ce53 4202{
2f937369 4203 rtx last = before;
b3694847 4204 rtx insn;
23b2ce53 4205
2f937369
DM
4206#ifdef ENABLE_RTL_CHECKING
4207 if (before == NULL_RTX)
4208 abort ();
4209#endif
4210
4211 if (x == NULL_RTX)
4212 return last;
4213
4214 switch (GET_CODE (x))
23b2ce53 4215 {
2f937369
DM
4216 case INSN:
4217 case JUMP_INSN:
4218 case CALL_INSN:
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case NOTE:
4222 insn = x;
4223 while (insn)
4224 {
4225 rtx next = NEXT_INSN (insn);
4226 add_insn_before (insn, before);
4227 last = insn;
4228 insn = next;
4229 }
4230 break;
4231
4232#ifdef ENABLE_RTL_CHECKING
4233 case SEQUENCE:
4234 abort ();
4235 break;
4236#endif
4237
4238 default:
4239 last = make_insn_raw (x);
4240 add_insn_before (last, before);
4241 break;
23b2ce53
RS
4242 }
4243
2f937369 4244 return last;
23b2ce53
RS
4245}
4246
2f937369 4247/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4248 and output it before the instruction BEFORE. */
4249
4250rtx
502b8322 4251emit_jump_insn_before (rtx x, rtx before)
23b2ce53 4252{
d950dee3 4253 rtx insn, last = NULL_RTX;
aff507f4 4254
2f937369
DM
4255#ifdef ENABLE_RTL_CHECKING
4256 if (before == NULL_RTX)
4257 abort ();
4258#endif
4259
4260 switch (GET_CODE (x))
aff507f4 4261 {
2f937369
DM
4262 case INSN:
4263 case JUMP_INSN:
4264 case CALL_INSN:
4265 case CODE_LABEL:
4266 case BARRIER:
4267 case NOTE:
4268 insn = x;
4269 while (insn)
4270 {
4271 rtx next = NEXT_INSN (insn);
4272 add_insn_before (insn, before);
4273 last = insn;
4274 insn = next;
4275 }
4276 break;
4277
4278#ifdef ENABLE_RTL_CHECKING
4279 case SEQUENCE:
4280 abort ();
4281 break;
4282#endif
4283
4284 default:
4285 last = make_jump_insn_raw (x);
4286 add_insn_before (last, before);
4287 break;
aff507f4
RK
4288 }
4289
2f937369 4290 return last;
23b2ce53
RS
4291}
4292
2f937369 4293/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4294 and output it before the instruction BEFORE. */
4295
4296rtx
502b8322 4297emit_call_insn_before (rtx x, rtx before)
969d70ca 4298{
d950dee3 4299 rtx last = NULL_RTX, insn;
969d70ca 4300
2f937369
DM
4301#ifdef ENABLE_RTL_CHECKING
4302 if (before == NULL_RTX)
4303 abort ();
4304#endif
4305
4306 switch (GET_CODE (x))
969d70ca 4307 {
2f937369
DM
4308 case INSN:
4309 case JUMP_INSN:
4310 case CALL_INSN:
4311 case CODE_LABEL:
4312 case BARRIER:
4313 case NOTE:
4314 insn = x;
4315 while (insn)
4316 {
4317 rtx next = NEXT_INSN (insn);
4318 add_insn_before (insn, before);
4319 last = insn;
4320 insn = next;
4321 }
4322 break;
4323
4324#ifdef ENABLE_RTL_CHECKING
4325 case SEQUENCE:
4326 abort ();
4327 break;
4328#endif
4329
4330 default:
4331 last = make_call_insn_raw (x);
4332 add_insn_before (last, before);
4333 break;
969d70ca
JH
4334 }
4335
2f937369 4336 return last;
969d70ca
JH
4337}
4338
23b2ce53 4339/* Make an insn of code BARRIER
e881bb1b 4340 and output it before the insn BEFORE. */
23b2ce53
RS
4341
4342rtx
502b8322 4343emit_barrier_before (rtx before)
23b2ce53 4344{
b3694847 4345 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4346
4347 INSN_UID (insn) = cur_insn_uid++;
4348
a0ae8e8d 4349 add_insn_before (insn, before);
23b2ce53
RS
4350 return insn;
4351}
4352
e881bb1b
RH
4353/* Emit the label LABEL before the insn BEFORE. */
4354
4355rtx
502b8322 4356emit_label_before (rtx label, rtx before)
e881bb1b
RH
4357{
4358 /* This can be called twice for the same label as a result of the
4359 confusion that follows a syntax error! So make it harmless. */
4360 if (INSN_UID (label) == 0)
4361 {
4362 INSN_UID (label) = cur_insn_uid++;
4363 add_insn_before (label, before);
4364 }
4365
4366 return label;
4367}
4368
23b2ce53
RS
4369/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4370
4371rtx
502b8322 4372emit_note_before (int subtype, rtx before)
23b2ce53 4373{
b3694847 4374 rtx note = rtx_alloc (NOTE);
23b2ce53
RS
4375 INSN_UID (note) = cur_insn_uid++;
4376 NOTE_SOURCE_FILE (note) = 0;
4377 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 4378 BLOCK_FOR_INSN (note) = NULL;
23b2ce53 4379
a0ae8e8d 4380 add_insn_before (note, before);
23b2ce53
RS
4381 return note;
4382}
4383\f
2f937369
DM
4384/* Helper for emit_insn_after, handles lists of instructions
4385 efficiently. */
23b2ce53 4386
502b8322 4387static rtx emit_insn_after_1 (rtx, rtx);
2f937369
DM
4388
4389static rtx
502b8322 4390emit_insn_after_1 (rtx first, rtx after)
23b2ce53 4391{
2f937369
DM
4392 rtx last;
4393 rtx after_after;
4394 basic_block bb;
23b2ce53 4395
2f937369
DM
4396 if (GET_CODE (after) != BARRIER
4397 && (bb = BLOCK_FOR_INSN (after)))
23b2ce53 4398 {
2f937369
DM
4399 bb->flags |= BB_DIRTY;
4400 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4401 if (GET_CODE (last) != BARRIER)
4402 set_block_for_insn (last, bb);
4403 if (GET_CODE (last) != BARRIER)
4404 set_block_for_insn (last, bb);
4405 if (bb->end == after)
4406 bb->end = last;
23b2ce53
RS
4407 }
4408 else
2f937369
DM
4409 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4410 continue;
4411
4412 after_after = NEXT_INSN (after);
4413
4414 NEXT_INSN (after) = first;
4415 PREV_INSN (first) = after;
4416 NEXT_INSN (last) = after_after;
4417 if (after_after)
4418 PREV_INSN (after_after) = last;
4419
4420 if (after == last_insn)
4421 last_insn = last;
4422 return last;
4423}
4424
4425/* Make X be output after the insn AFTER. */
4426
4427rtx
502b8322 4428emit_insn_after (rtx x, rtx after)
2f937369
DM
4429{
4430 rtx last = after;
4431
4432#ifdef ENABLE_RTL_CHECKING
4433 if (after == NULL_RTX)
4434 abort ();
4435#endif
4436
4437 if (x == NULL_RTX)
4438 return last;
4439
4440 switch (GET_CODE (x))
23b2ce53 4441 {
2f937369
DM
4442 case INSN:
4443 case JUMP_INSN:
4444 case CALL_INSN:
4445 case CODE_LABEL:
4446 case BARRIER:
4447 case NOTE:
4448 last = emit_insn_after_1 (x, after);
4449 break;
4450
4451#ifdef ENABLE_RTL_CHECKING
4452 case SEQUENCE:
4453 abort ();
4454 break;
4455#endif
4456
4457 default:
4458 last = make_insn_raw (x);
4459 add_insn_after (last, after);
4460 break;
23b2ce53
RS
4461 }
4462
2f937369 4463 return last;
23b2ce53
RS
4464}
4465
255680cf
RK
4466/* Similar to emit_insn_after, except that line notes are to be inserted so
4467 as to act as if this insn were at FROM. */
4468
4469void
502b8322 4470emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
255680cf
RK
4471{
4472 rtx from_line = find_line_note (from);
4473 rtx after_line = find_line_note (after);
2f937369 4474 rtx insn = emit_insn_after (x, after);
255680cf
RK
4475
4476 if (from_line)
5f2fc772 4477 emit_note_copy_after (from_line, after);
255680cf
RK
4478
4479 if (after_line)
5f2fc772 4480 emit_note_copy_after (after_line, insn);
255680cf
RK
4481}
4482
2f937369 4483/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4484 and output it after the insn AFTER. */
4485
4486rtx
502b8322 4487emit_jump_insn_after (rtx x, rtx after)
23b2ce53 4488{
2f937369 4489 rtx last;
23b2ce53 4490
2f937369
DM
4491#ifdef ENABLE_RTL_CHECKING
4492 if (after == NULL_RTX)
4493 abort ();
4494#endif
4495
4496 switch (GET_CODE (x))
23b2ce53 4497 {
2f937369
DM
4498 case INSN:
4499 case JUMP_INSN:
4500 case CALL_INSN:
4501 case CODE_LABEL:
4502 case BARRIER:
4503 case NOTE:
4504 last = emit_insn_after_1 (x, after);
4505 break;
4506
4507#ifdef ENABLE_RTL_CHECKING
4508 case SEQUENCE:
4509 abort ();
4510 break;
4511#endif
4512
4513 default:
4514 last = make_jump_insn_raw (x);
4515 add_insn_after (last, after);
4516 break;
23b2ce53
RS
4517 }
4518
2f937369
DM
4519 return last;
4520}
4521
4522/* Make an instruction with body X and code CALL_INSN
4523 and output it after the instruction AFTER. */
4524
4525rtx
502b8322 4526emit_call_insn_after (rtx x, rtx after)
2f937369
DM
4527{
4528 rtx last;
4529
4530#ifdef ENABLE_RTL_CHECKING
4531 if (after == NULL_RTX)
4532 abort ();
4533#endif
4534
4535 switch (GET_CODE (x))
4536 {
4537 case INSN:
4538 case JUMP_INSN:
4539 case CALL_INSN:
4540 case CODE_LABEL:
4541 case BARRIER:
4542 case NOTE:
4543 last = emit_insn_after_1 (x, after);
4544 break;
4545
4546#ifdef ENABLE_RTL_CHECKING
4547 case SEQUENCE:
4548 abort ();
4549 break;
4550#endif
4551
4552 default:
4553 last = make_call_insn_raw (x);
4554 add_insn_after (last, after);
4555 break;
4556 }
4557
4558 return last;
23b2ce53
RS
4559}
4560
4561/* Make an insn of code BARRIER
4562 and output it after the insn AFTER. */
4563
4564rtx
502b8322 4565emit_barrier_after (rtx after)
23b2ce53 4566{
b3694847 4567 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4568
4569 INSN_UID (insn) = cur_insn_uid++;
4570
4571 add_insn_after (insn, after);
4572 return insn;
4573}
4574
4575/* Emit the label LABEL after the insn AFTER. */
4576
4577rtx
502b8322 4578emit_label_after (rtx label, rtx after)
23b2ce53
RS
4579{
4580 /* This can be called twice for the same label
4581 as a result of the confusion that follows a syntax error!
4582 So make it harmless. */
4583 if (INSN_UID (label) == 0)
4584 {
4585 INSN_UID (label) = cur_insn_uid++;
4586 add_insn_after (label, after);
4587 }
4588
4589 return label;
4590}
4591
4592/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4593
4594rtx
502b8322 4595emit_note_after (int subtype, rtx after)
23b2ce53 4596{
b3694847 4597 rtx note = rtx_alloc (NOTE);
23b2ce53
RS
4598 INSN_UID (note) = cur_insn_uid++;
4599 NOTE_SOURCE_FILE (note) = 0;
4600 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 4601 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4602 add_insn_after (note, after);
4603 return note;
4604}
4605
5f2fc772 4606/* Emit a copy of note ORIG after the insn AFTER. */
23b2ce53
RS
4607
4608rtx
5f2fc772 4609emit_note_copy_after (rtx orig, rtx after)
23b2ce53 4610{
b3694847 4611 rtx note;
23b2ce53 4612
5f2fc772 4613 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
23b2ce53
RS
4614 {
4615 cur_insn_uid++;
4616 return 0;
4617 }
4618
68252e27 4619 note = rtx_alloc (NOTE);
23b2ce53 4620 INSN_UID (note) = cur_insn_uid++;
5f2fc772
NS
4621 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4622 NOTE_DATA (note) = NOTE_DATA (orig);
ba4f7968 4623 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4624 add_insn_after (note, after);
4625 return note;
4626}
4627\f
0435312e 4628/* Like emit_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4629rtx
502b8322 4630emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4631{
4632 rtx last = emit_insn_after (pattern, after);
0d682900 4633
2f937369
DM
4634 after = NEXT_INSN (after);
4635 while (1)
4636 {
d11cea13 4637 if (active_insn_p (after))
0435312e 4638 INSN_LOCATOR (after) = loc;
2f937369
DM
4639 if (after == last)
4640 break;
4641 after = NEXT_INSN (after);
4642 }
0d682900
JH
4643 return last;
4644}
4645
0435312e 4646/* Like emit_jump_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4647rtx
502b8322 4648emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4649{
4650 rtx last = emit_jump_insn_after (pattern, after);
2f937369
DM
4651
4652 after = NEXT_INSN (after);
4653 while (1)
4654 {
d11cea13 4655 if (active_insn_p (after))
0435312e 4656 INSN_LOCATOR (after) = loc;
2f937369
DM
4657 if (after == last)
4658 break;
4659 after = NEXT_INSN (after);
4660 }
0d682900
JH
4661 return last;
4662}
4663
0435312e 4664/* Like emit_call_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4665rtx
502b8322 4666emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4667{
4668 rtx last = emit_call_insn_after (pattern, after);
2f937369
DM
4669
4670 after = NEXT_INSN (after);
4671 while (1)
4672 {
d11cea13 4673 if (active_insn_p (after))
0435312e 4674 INSN_LOCATOR (after) = loc;
2f937369
DM
4675 if (after == last)
4676 break;
4677 after = NEXT_INSN (after);
4678 }
0d682900
JH
4679 return last;
4680}
4681
0435312e 4682/* Like emit_insn_before, but set INSN_LOCATOR according to SCOPE. */
0d682900 4683rtx
502b8322 4684emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0d682900
JH
4685{
4686 rtx first = PREV_INSN (before);
4687 rtx last = emit_insn_before (pattern, before);
4688
2f937369
DM
4689 first = NEXT_INSN (first);
4690 while (1)
4691 {
d11cea13 4692 if (active_insn_p (first))
0435312e 4693 INSN_LOCATOR (first) = loc;
2f937369
DM
4694 if (first == last)
4695 break;
4696 first = NEXT_INSN (first);
4697 }
0d682900
JH
4698 return last;
4699}
4700\f
2f937369
DM
4701/* Take X and emit it at the end of the doubly-linked
4702 INSN list.
23b2ce53
RS
4703
4704 Returns the last insn emitted. */
4705
4706rtx
502b8322 4707emit_insn (rtx x)
23b2ce53 4708{
2f937369
DM
4709 rtx last = last_insn;
4710 rtx insn;
23b2ce53 4711
2f937369
DM
4712 if (x == NULL_RTX)
4713 return last;
23b2ce53 4714
2f937369
DM
4715 switch (GET_CODE (x))
4716 {
4717 case INSN:
4718 case JUMP_INSN:
4719 case CALL_INSN:
4720 case CODE_LABEL:
4721 case BARRIER:
4722 case NOTE:
4723 insn = x;
4724 while (insn)
23b2ce53 4725 {
2f937369 4726 rtx next = NEXT_INSN (insn);
23b2ce53 4727 add_insn (insn);
2f937369
DM
4728 last = insn;
4729 insn = next;
23b2ce53 4730 }
2f937369 4731 break;
23b2ce53 4732
2f937369
DM
4733#ifdef ENABLE_RTL_CHECKING
4734 case SEQUENCE:
4735 abort ();
4736 break;
4737#endif
23b2ce53 4738
2f937369
DM
4739 default:
4740 last = make_insn_raw (x);
4741 add_insn (last);
4742 break;
23b2ce53
RS
4743 }
4744
4745 return last;
4746}
4747
2f937369
DM
4748/* Make an insn of code JUMP_INSN with pattern X
4749 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4750
4751rtx
502b8322 4752emit_jump_insn (rtx x)
23b2ce53 4753{
d950dee3 4754 rtx last = NULL_RTX, insn;
23b2ce53 4755
2f937369 4756 switch (GET_CODE (x))
23b2ce53 4757 {
2f937369
DM
4758 case INSN:
4759 case JUMP_INSN:
4760 case CALL_INSN:
4761 case CODE_LABEL:
4762 case BARRIER:
4763 case NOTE:
4764 insn = x;
4765 while (insn)
4766 {
4767 rtx next = NEXT_INSN (insn);
4768 add_insn (insn);
4769 last = insn;
4770 insn = next;
4771 }
4772 break;
e0a5c5eb 4773
2f937369
DM
4774#ifdef ENABLE_RTL_CHECKING
4775 case SEQUENCE:
4776 abort ();
4777 break;
4778#endif
e0a5c5eb 4779
2f937369
DM
4780 default:
4781 last = make_jump_insn_raw (x);
4782 add_insn (last);
4783 break;
3c030e88 4784 }
e0a5c5eb
RS
4785
4786 return last;
4787}
4788
2f937369 4789/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4790 and add it to the end of the doubly-linked list. */
4791
4792rtx
502b8322 4793emit_call_insn (rtx x)
23b2ce53 4794{
2f937369
DM
4795 rtx insn;
4796
4797 switch (GET_CODE (x))
23b2ce53 4798 {
2f937369
DM
4799 case INSN:
4800 case JUMP_INSN:
4801 case CALL_INSN:
4802 case CODE_LABEL:
4803 case BARRIER:
4804 case NOTE:
4805 insn = emit_insn (x);
4806 break;
23b2ce53 4807
2f937369
DM
4808#ifdef ENABLE_RTL_CHECKING
4809 case SEQUENCE:
4810 abort ();
4811 break;
4812#endif
23b2ce53 4813
2f937369
DM
4814 default:
4815 insn = make_call_insn_raw (x);
23b2ce53 4816 add_insn (insn);
2f937369 4817 break;
23b2ce53 4818 }
2f937369
DM
4819
4820 return insn;
23b2ce53
RS
4821}
4822
4823/* Add the label LABEL to the end of the doubly-linked list. */
4824
4825rtx
502b8322 4826emit_label (rtx label)
23b2ce53
RS
4827{
4828 /* This can be called twice for the same label
4829 as a result of the confusion that follows a syntax error!
4830 So make it harmless. */
4831 if (INSN_UID (label) == 0)
4832 {
4833 INSN_UID (label) = cur_insn_uid++;
4834 add_insn (label);
4835 }
4836 return label;
4837}
4838
4839/* Make an insn of code BARRIER
4840 and add it to the end of the doubly-linked list. */
4841
4842rtx
502b8322 4843emit_barrier (void)
23b2ce53 4844{
b3694847 4845 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4846 INSN_UID (barrier) = cur_insn_uid++;
4847 add_insn (barrier);
4848 return barrier;
4849}
4850
0cea056b
NS
4851/* Make line numbering NOTE insn for LOCATION add it to the end
4852 of the doubly-linked list, but only if line-numbers are desired for
4853 debugging info and it doesn't match the previous one. */
23b2ce53
RS
4854
4855rtx
0cea056b 4856emit_line_note (location_t location)
23b2ce53 4857{
2e040219 4858 rtx note;
0cea056b
NS
4859
4860 set_file_and_line_for_stmt (location);
4861
4862 if (location.file && last_location.file
4863 && !strcmp (location.file, last_location.file)
4864 && location.line == last_location.line)
fd3acbb3 4865 return NULL_RTX;
0cea056b
NS
4866 last_location = location;
4867
23b2ce53 4868 if (no_line_numbers)
fd3acbb3
NS
4869 {
4870 cur_insn_uid++;
4871 return NULL_RTX;
4872 }
23b2ce53 4873
0cea056b
NS
4874 note = emit_note (location.line);
4875 NOTE_SOURCE_FILE (note) = location.file;
5f2fc772
NS
4876
4877 return note;
4878}
4879
4880/* Emit a copy of note ORIG. */
502b8322 4881
5f2fc772
NS
4882rtx
4883emit_note_copy (rtx orig)
4884{
4885 rtx note;
4886
4887 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4888 {
4889 cur_insn_uid++;
4890 return NULL_RTX;
4891 }
4892
4893 note = rtx_alloc (NOTE);
4894
4895 INSN_UID (note) = cur_insn_uid++;
4896 NOTE_DATA (note) = NOTE_DATA (orig);
4897 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4898 BLOCK_FOR_INSN (note) = NULL;
4899 add_insn (note);
4900
2e040219 4901 return note;
23b2ce53
RS
4902}
4903
2e040219
NS
4904/* Make an insn of code NOTE or type NOTE_NO
4905 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4906
4907rtx
502b8322 4908emit_note (int note_no)
23b2ce53 4909{
b3694847 4910 rtx note;
23b2ce53 4911
23b2ce53
RS
4912 note = rtx_alloc (NOTE);
4913 INSN_UID (note) = cur_insn_uid++;
2e040219 4914 NOTE_LINE_NUMBER (note) = note_no;
dd107e66 4915 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4916 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4917 add_insn (note);
4918 return note;
4919}
4920
23b2ce53 4921/* Cause next statement to emit a line note even if the line number
0cea056b 4922 has not changed. */
23b2ce53
RS
4923
4924void
502b8322 4925force_next_line_note (void)
23b2ce53 4926{
fd3acbb3 4927 last_location.line = -1;
23b2ce53 4928}
87b47c85
AM
4929
4930/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4931 note of this type already exists, remove it first. */
87b47c85 4932
3d238248 4933rtx
502b8322 4934set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
87b47c85
AM
4935{
4936 rtx note = find_reg_note (insn, kind, NULL_RTX);
4937
52488da1
JW
4938 switch (kind)
4939 {
4940 case REG_EQUAL:
4941 case REG_EQUIV:
4942 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4943 has multiple sets (some callers assume single_set
4944 means the insn only has one set, when in fact it
4945 means the insn only has one * useful * set). */
4946 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4947 {
4948 if (note)
4949 abort ();
4950 return NULL_RTX;
4951 }
4952
4953 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4954 It serves no useful purpose and breaks eliminate_regs. */
4955 if (GET_CODE (datum) == ASM_OPERANDS)
4956 return NULL_RTX;
4957 break;
4958
4959 default:
4960 break;
4961 }
3d238248 4962
750c9258 4963 if (note)
3d238248
JJ
4964 {
4965 XEXP (note, 0) = datum;
4966 return note;
4967 }
87b47c85
AM
4968
4969 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3d238248 4970 return REG_NOTES (insn);
87b47c85 4971}
23b2ce53
RS
4972\f
4973/* Return an indication of which type of insn should have X as a body.
4974 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4975
4976enum rtx_code
502b8322 4977classify_insn (rtx x)
23b2ce53
RS
4978{
4979 if (GET_CODE (x) == CODE_LABEL)
4980 return CODE_LABEL;
4981 if (GET_CODE (x) == CALL)
4982 return CALL_INSN;
4983 if (GET_CODE (x) == RETURN)
4984 return JUMP_INSN;
4985 if (GET_CODE (x) == SET)
4986 {
4987 if (SET_DEST (x) == pc_rtx)
4988 return JUMP_INSN;
4989 else if (GET_CODE (SET_SRC (x)) == CALL)
4990 return CALL_INSN;
4991 else
4992 return INSN;
4993 }
4994 if (GET_CODE (x) == PARALLEL)
4995 {
b3694847 4996 int j;
23b2ce53
RS
4997 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4998 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4999 return CALL_INSN;
5000 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5001 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5002 return JUMP_INSN;
5003 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5004 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5005 return CALL_INSN;
5006 }
5007 return INSN;
5008}
5009
5010/* Emit the rtl pattern X as an appropriate kind of insn.
5011 If X is a label, it is simply added into the insn chain. */
5012
5013rtx
502b8322 5014emit (rtx x)
23b2ce53
RS
5015{
5016 enum rtx_code code = classify_insn (x);
5017
5018 if (code == CODE_LABEL)
5019 return emit_label (x);
5020 else if (code == INSN)
5021 return emit_insn (x);
5022 else if (code == JUMP_INSN)
5023 {
b3694847 5024 rtx insn = emit_jump_insn (x);
7f1c097d 5025 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
23b2ce53
RS
5026 return emit_barrier ();
5027 return insn;
5028 }
5029 else if (code == CALL_INSN)
5030 return emit_call_insn (x);
5031 else
5032 abort ();
5033}
5034\f
e2500fed
GK
5035/* Space for free sequence stack entries. */
5036static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5037
5c7a310f
MM
5038/* Begin emitting insns to a sequence which can be packaged in an
5039 RTL_EXPR. If this sequence will contain something that might cause
5040 the compiler to pop arguments to function calls (because those
5041 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5042 details), use do_pending_stack_adjust before calling this function.
5043 That will ensure that the deferred pops are not accidentally
4eb00163 5044 emitted in the middle of this sequence. */
23b2ce53
RS
5045
5046void
502b8322 5047start_sequence (void)
23b2ce53
RS
5048{
5049 struct sequence_stack *tem;
5050
e2500fed
GK
5051 if (free_sequence_stack != NULL)
5052 {
5053 tem = free_sequence_stack;
5054 free_sequence_stack = tem->next;
5055 }
5056 else
703ad42b 5057 tem = ggc_alloc (sizeof (struct sequence_stack));
23b2ce53 5058
49ad7cfa 5059 tem->next = seq_stack;
23b2ce53
RS
5060 tem->first = first_insn;
5061 tem->last = last_insn;
591ccf92 5062 tem->sequence_rtl_expr = seq_rtl_expr;
23b2ce53 5063
49ad7cfa 5064 seq_stack = tem;
23b2ce53
RS
5065
5066 first_insn = 0;
5067 last_insn = 0;
5068}
5069
591ccf92
MM
5070/* Similarly, but indicate that this sequence will be placed in T, an
5071 RTL_EXPR. See the documentation for start_sequence for more
5072 information about how to use this function. */
5073
5074void
502b8322 5075start_sequence_for_rtl_expr (tree t)
591ccf92
MM
5076{
5077 start_sequence ();
5078
5079 seq_rtl_expr = t;
5080}
5081
5c7a310f
MM
5082/* Set up the insn chain starting with FIRST as the current sequence,
5083 saving the previously current one. See the documentation for
5084 start_sequence for more information about how to use this function. */
23b2ce53
RS
5085
5086void
502b8322 5087push_to_sequence (rtx first)
23b2ce53
RS
5088{
5089 rtx last;
5090
5091 start_sequence ();
5092
5093 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5094
5095 first_insn = first;
5096 last_insn = last;
5097}
5098
c14f7160
ML
5099/* Set up the insn chain from a chain stort in FIRST to LAST. */
5100
5101void
502b8322 5102push_to_full_sequence (rtx first, rtx last)
c14f7160
ML
5103{
5104 start_sequence ();
5105 first_insn = first;
5106 last_insn = last;
5107 /* We really should have the end of the insn chain here. */
5108 if (last && NEXT_INSN (last))
5109 abort ();
5110}
5111
f15ae3a1
TW
5112/* Set up the outer-level insn chain
5113 as the current sequence, saving the previously current one. */
5114
5115void
502b8322 5116push_topmost_sequence (void)
f15ae3a1 5117{
aefdd5ab 5118 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
5119
5120 start_sequence ();
5121
49ad7cfa 5122 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5123 top = stack;
5124
5125 first_insn = top->first;
5126 last_insn = top->last;
591ccf92 5127 seq_rtl_expr = top->sequence_rtl_expr;
f15ae3a1
TW
5128}
5129
5130/* After emitting to the outer-level insn chain, update the outer-level
5131 insn chain, and restore the previous saved state. */
5132
5133void
502b8322 5134pop_topmost_sequence (void)
f15ae3a1 5135{
aefdd5ab 5136 struct sequence_stack *stack, *top = NULL;
f15ae3a1 5137
49ad7cfa 5138 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
5139 top = stack;
5140
5141 top->first = first_insn;
5142 top->last = last_insn;
591ccf92 5143 /* ??? Why don't we save seq_rtl_expr here? */
f15ae3a1
TW
5144
5145 end_sequence ();
5146}
5147
23b2ce53
RS
5148/* After emitting to a sequence, restore previous saved state.
5149
5c7a310f 5150 To get the contents of the sequence just made, you must call
2f937369 5151 `get_insns' *before* calling here.
5c7a310f
MM
5152
5153 If the compiler might have deferred popping arguments while
5154 generating this sequence, and this sequence will not be immediately
5155 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 5156 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
5157 pops are inserted into this sequence, and not into some random
5158 location in the instruction stream. See INHIBIT_DEFER_POP for more
5159 information about deferred popping of arguments. */
23b2ce53
RS
5160
5161void
502b8322 5162end_sequence (void)
23b2ce53 5163{
49ad7cfa 5164 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
5165
5166 first_insn = tem->first;
5167 last_insn = tem->last;
591ccf92 5168 seq_rtl_expr = tem->sequence_rtl_expr;
49ad7cfa 5169 seq_stack = tem->next;
23b2ce53 5170
e2500fed
GK
5171 memset (tem, 0, sizeof (*tem));
5172 tem->next = free_sequence_stack;
5173 free_sequence_stack = tem;
23b2ce53
RS
5174}
5175
c14f7160
ML
5176/* This works like end_sequence, but records the old sequence in FIRST
5177 and LAST. */
5178
5179void
502b8322 5180end_full_sequence (rtx *first, rtx *last)
c14f7160
ML
5181{
5182 *first = first_insn;
5183 *last = last_insn;
68252e27 5184 end_sequence ();
c14f7160
ML
5185}
5186
23b2ce53
RS
5187/* Return 1 if currently emitting into a sequence. */
5188
5189int
502b8322 5190in_sequence_p (void)
23b2ce53 5191{
49ad7cfa 5192 return seq_stack != 0;
23b2ce53 5193}
23b2ce53 5194\f
59ec66dc
MM
5195/* Put the various virtual registers into REGNO_REG_RTX. */
5196
5197void
502b8322 5198init_virtual_regs (struct emit_status *es)
59ec66dc 5199{
49ad7cfa
BS
5200 rtx *ptr = es->x_regno_reg_rtx;
5201 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5202 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5203 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5204 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5205 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5206}
5207
da43a810
BS
5208\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      /* These codes are shared, never copied.  */
      return orig;

    case SCRATCH:
      /* Return the previously made copy so all references to one
	 SCRATCH end up sharing a single copy.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  memcpy (copy, orig, RTX_HDR_SIZE);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      /* Start with a shallow copy of the field, then deep-copy
	 sub-expressions and vectors below.  */
      copy->u.fld[i] = orig->u.fld[i];
      switch (*format_ptr++)
	{
	case 'e':
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	  break;

	case 'E':
	case 'V':
	  /* Share the operand/constraint vectors of a previously copied
	     ASM_OPERANDS instead of duplicating them again.  */
	  if (XVEC (orig, i) == orig_asm_constraints_vector)
	    XVEC (copy, i) = copy_asm_constraints_vector;
	  else if (XVEC (orig, i) == orig_asm_operands_vector)
	    XVEC (copy, i) = copy_asm_operands_vector;
	  else if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	    }
	  break;

	case 't':
	case 'w':
	case 'i':
	case 's':
	case 'S':
	case 'u':
	case '0':
	  /* These are left unchanged (already handled by the shallow
	     field copy above).  */
	  break;

	default:
	  abort ();
	}
    }

  if (code == SCRATCH)
    {
      /* Record the mapping so later references to this SCRATCH share
	 the copy we just made.  */
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
	abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      /* Remember the vectors so sibling ASM_OPERANDS in the same insn
	 share them (see the 'E'/'V' case above).  */
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5363
5364/* Create a new copy of an rtx.
5365 This function differs from copy_rtx in that it handles SCRATCHes and
5366 ASM_OPERANDs properly.
5367 INSN doesn't really have to be a full INSN; it could be just the
5368 pattern. */
5369rtx
502b8322 5370copy_insn (rtx insn)
da43a810
BS
5371{
5372 copy_insn_n_scratches = 0;
5373 orig_asm_operands_vector = 0;
5374 orig_asm_constraints_vector = 0;
5375 copy_asm_operands_vector = 0;
5376 copy_asm_constraints_vector = 0;
5377 return copy_insn_1 (insn);
5378}
59ec66dc 5379
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  struct function *f = cfun;

  /* Fresh per-function emit state.  */
  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  /* Pseudo registers start just past the virtual registers.  */
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location.line = 0;
  last_location.file = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
			 * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5449
ff88fe10 5450/* Generate the constant 0. */
69ef87e2
AH
5451
5452static rtx
502b8322 5453gen_const_vector_0 (enum machine_mode mode)
69ef87e2
AH
5454{
5455 rtx tem;
5456 rtvec v;
5457 int units, i;
5458 enum machine_mode inner;
5459
5460 units = GET_MODE_NUNITS (mode);
5461 inner = GET_MODE_INNER (mode);
5462
5463 v = rtvec_alloc (units);
5464
5465 /* We need to call this function after we to set CONST0_RTX first. */
5466 if (!CONST0_RTX (inner))
5467 abort ();
5468
5469 for (i = 0; i < units; ++i)
5470 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5471
a06e3c40 5472 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5473 return tem;
5474}
5475
a06e3c40
R
5476/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5477 all elements are zero. */
5478rtx
502b8322 5479gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40
R
5480{
5481 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5482 int i;
5483
5484 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5485 if (RTVEC_ELT (v, i) != inner_zero)
5486 return gen_rtx_raw_CONST_VECTOR (mode, v);
5487 return CONST0_RTX (mode);
5488}
5489
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  /* Pick the narrowest integer modes matching the unit and word sizes.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  /* Likewise the narrowest float mode matching DOUBLE_TYPE_SIZE.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* Shared REAL_VALUE_TYPE constants used by constant folding.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  /* 0.5 is 1.0 with the exponent decremented.  */
  dconsthalf = dconst1;
  dconsthalf.exp--;

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be given to at least 160 bits precision.  */
  real_from_string (&dconstpi,
		    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
		    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  /* Fill const_tiny_rtx[i][mode] with the shared constant i for each
     mode class; i is 0, 1 or 2.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Zero vectors for each vector mode.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

  /* An explicit STATIC_CHAIN rtx overrides the REGNUM-based one above.  */
#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
a11759a3
JR
5686\f
5687/* Query and clear/ restore no_line_numbers. This is used by the
5688 switch / case handling in stmt.c to give proper line numbers in
5689 warnings about unreachable code. */
5690
5691int
502b8322 5692force_line_numbers (void)
a11759a3
JR
5693{
5694 int old = no_line_numbers;
5695
5696 no_line_numbers = 0;
5697 if (old)
5698 force_next_line_note ();
5699 return old;
5700}
5701
5702void
502b8322 5703restore_line_number_status (int old_value)
a11759a3
JR
5704{
5705 no_line_numbers = old_value;
5706}
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  /* Emit the copied pattern with the insn-emitting routine matching
     the original insn's code.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  If the copy ends a libcall region
     (REG_RETVAL), walk back to the insn starting it (REG_LIBCALL) and
     repoint both notes at the copied insns.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
e2500fed
GK
5774
5775#include "gt-emit-rtl.h"