]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/emit-rtl.c
* c-tree.h (grokfield): Remove unused filename and line parameters.
[thirdparty/gcc.git] / gcc / emit-rtl.c
CommitLineData
bccafa26 1/* Emit RTL for the GCC expander.
ddca3e9d 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
263c416c 3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
15bbde2b 4
f12b58b3 5This file is part of GCC.
15bbde2b 6
f12b58b3 7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
15bbde2b 11
f12b58b3 12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
15bbde2b 16
17You should have received a copy of the GNU General Public License
f12b58b3 18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
15bbde2b 21
22
23/* Middle-to-low level generation of rtx code and insns.
24
25 This file contains the functions `gen_rtx', `gen_reg_rtx'
26 and `gen_label_rtx' that are the usual ways of creating rtl
27 expressions for most purposes.
28
29 It also has the functions for creating insns and linking
30 them in the doubly-linked chain.
31
32 The patterns of the insns are created by machine-dependent
33 routines in insn-emit.c, which is generated automatically from
34 the machine description. These routines use `gen_rtx' to make
35 the individual rtx's of the pattern; what is machine dependent
36 is the kind of rtx's they make and what arguments they use. */
37
38#include "config.h"
405711de 39#include "system.h"
805e22b2 40#include "coretypes.h"
41#include "tm.h"
d3b64f2d 42#include "toplev.h"
15bbde2b 43#include "rtl.h"
3fd7e17f 44#include "tree.h"
7953c610 45#include "tm_p.h"
15bbde2b 46#include "flags.h"
47#include "function.h"
48#include "expr.h"
49#include "regs.h"
c6b6c51f 50#include "hard-reg-set.h"
73f5c1e3 51#include "hashtab.h"
15bbde2b 52#include "insn-config.h"
0dbd1c74 53#include "recog.h"
15bbde2b 54#include "real.h"
a3426c4c 55#include "bitmap.h"
f3d96a58 56#include "basic-block.h"
a7b0c170 57#include "ggc.h"
b29760a8 58#include "debug.h"
b0278d39 59#include "langhooks.h"
649d8da6 60
399d45d3 61/* Commonly used modes. */
62
a92771b8 63enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9e042f31 65enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
a92771b8 66enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
399d45d3 67
15bbde2b 68
69/* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
9105005a 72static GTY(()) int label_num = 1;
15bbde2b 73
15bbde2b 74/* Highest label number in current function.
75 Zero means use the value of label_num instead.
76 This is nonzero only when belatedly compiling an inline function. */
77
78static int last_label_num;
79
80/* Value label_num had when set_new_first_and_last_label_number was called.
81 If label_num has not changed since then, last_label_num is valid. */
82
83static int base_label_num;
84
85/* Nonzero means do not generate NOTEs for source line numbers. */
86
87static int no_line_numbers;
88
89/* Commonly used rtx's, so that we only need space for one copy.
90 These are initialized once for the entire compilation.
2ff23ed0 91 All of these are unique; no other rtx-object will be equal to any
92 of these. */
15bbde2b 93
57c097d5 94rtx global_rtl[GR_MAX];
15bbde2b 95
90295bd2 96/* Commonly used RTL for hard registers. These objects are not necessarily
97 unique, so we allocate them separately from global_rtl. They are
98 initialized once per compilation unit, then copied into regno_reg_rtx
99 at the beginning of each function. */
100static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
101
15bbde2b 102/* We record floating-point CONST_DOUBLEs in each floating-point mode for
103 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
104 record a copy of const[012]_rtx. */
105
106rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
107
1a60f06a 108rtx const_true_rtx;
109
15bbde2b 110REAL_VALUE_TYPE dconst0;
111REAL_VALUE_TYPE dconst1;
112REAL_VALUE_TYPE dconst2;
113REAL_VALUE_TYPE dconstm1;
77e89269 114REAL_VALUE_TYPE dconstm2;
115REAL_VALUE_TYPE dconsthalf;
15bbde2b 116
117/* All references to the following fixed hard registers go through
118 these unique rtl objects. On machines where the frame-pointer and
119 arg-pointer are the same register, they use the same unique object.
120
121 After register allocation, other rtl objects which used to be pseudo-regs
122 may be clobbered to refer to the frame-pointer register.
123 But references that were originally to the frame-pointer can be
124 distinguished from the others because they contain frame_pointer_rtx.
125
90c25824 126 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
127 tricky: until register elimination has taken place hard_frame_pointer_rtx
d823ba47 128 should be used if it is being set, and frame_pointer_rtx otherwise. After
90c25824 129 register elimination hard_frame_pointer_rtx should always be used.
130 On machines where the two registers are same (most) then these are the
131 same.
132
15bbde2b 133 In an inline procedure, the stack and frame pointer rtxs may not be
134 used for anything else. */
15bbde2b 135rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
136rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
137rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
138rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
139rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
140
4b0c5859 141/* This is used to implement __builtin_return_address for some machines.
142 See for instance the MIPS port. */
143rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
144
15bbde2b 145/* We make one copy of (const_int C) where C is in
146 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
147 to save space during the compilation and simplify comparisons of
148 integers. */
149
57c097d5 150rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
15bbde2b 151
73f5c1e3 152/* A hash table storing CONST_INTs whose absolute value is greater
153 than MAX_SAVED_CONST_INT. */
154
1f3233d1 155static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
156 htab_t const_int_htab;
73f5c1e3 157
c6259b83 158/* A hash table storing memory attribute structures. */
1f3233d1 159static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
160 htab_t mem_attrs_htab;
c6259b83 161
ca74b940 162/* A hash table storing register attribute structures. */
163static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
164 htab_t reg_attrs_htab;
165
2ff23ed0 166/* A hash table storing all CONST_DOUBLEs. */
1f3233d1 167static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
168 htab_t const_double_htab;
2ff23ed0 169
08513b52 170#define first_insn (cfun->emit->x_first_insn)
171#define last_insn (cfun->emit->x_last_insn)
172#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
0a3b3d88 173#define last_location (cfun->emit->x_last_location)
08513b52 174#define first_label_num (cfun->emit->x_first_label_num)
15bbde2b 175
621f6678 176static rtx make_jump_insn_raw PARAMS ((rtx));
177static rtx make_call_insn_raw PARAMS ((rtx));
178static rtx find_line_note PARAMS ((rtx));
96216d37 179static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
180 int));
2d96a59a 181static void unshare_all_rtl_1 PARAMS ((rtx));
72ec8878 182static void unshare_all_decls PARAMS ((tree));
01dc9f0c 183static void reset_used_decls PARAMS ((tree));
344dc2fa 184static void mark_label_nuses PARAMS ((rtx));
73f5c1e3 185static hashval_t const_int_htab_hash PARAMS ((const void *));
186static int const_int_htab_eq PARAMS ((const void *,
187 const void *));
2ff23ed0 188static hashval_t const_double_htab_hash PARAMS ((const void *));
189static int const_double_htab_eq PARAMS ((const void *,
190 const void *));
191static rtx lookup_const_double PARAMS ((rtx));
c6259b83 192static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
193static int mem_attrs_htab_eq PARAMS ((const void *,
194 const void *));
c6259b83 195static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
5cc193e7 196 rtx, unsigned int,
197 enum machine_mode));
ca74b940 198static hashval_t reg_attrs_htab_hash PARAMS ((const void *));
199static int reg_attrs_htab_eq PARAMS ((const void *,
200 const void *));
201static reg_attrs *get_reg_attrs PARAMS ((tree, int));
b10dbbca 202static tree component_ref_for_mem_expr PARAMS ((tree));
89dd3424 203static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
591356e8 204static rtx gen_complex_constant_part PARAMS ((enum machine_mode,
205 rtx, int));
73f5c1e3 206
3cd757b1 207/* Probability of the conditional branch currently proceeded by try_split.
208 Set to -1 otherwise. */
209int split_branch_probability = -1;
649d8da6 210\f
73f5c1e3 211/* Returns a hash code for X (which is a really a CONST_INT). */
212
213static hashval_t
214const_int_htab_hash (x)
215 const void *x;
216{
2ff23ed0 217 return (hashval_t) INTVAL ((struct rtx_def *) x);
73f5c1e3 218}
219
6ef828f9 220/* Returns nonzero if the value represented by X (which is really a
73f5c1e3 221 CONST_INT) is the same as that given by Y (which is really a
222 HOST_WIDE_INT *). */
223
224static int
225const_int_htab_eq (x, y)
226 const void *x;
227 const void *y;
228{
2ff23ed0 229 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
230}
231
232/* Returns a hash code for X (which is really a CONST_DOUBLE). */
233static hashval_t
234const_double_htab_hash (x)
235 const void *x;
236{
2ff23ed0 237 rtx value = (rtx) x;
3393215f 238 hashval_t h;
2ff23ed0 239
3393215f 240 if (GET_MODE (value) == VOIDmode)
241 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
242 else
a5760913 243 {
e2e205b3 244 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
a5760913 245 /* MODE is used in the comparison, so it should be in the hash. */
246 h ^= GET_MODE (value);
247 }
2ff23ed0 248 return h;
249}
250
6ef828f9 251/* Returns nonzero if the value represented by X (really a ...)
2ff23ed0 252 is the same as that represented by Y (really a ...) */
253static int
254const_double_htab_eq (x, y)
255 const void *x;
256 const void *y;
257{
258 rtx a = (rtx)x, b = (rtx)y;
2ff23ed0 259
260 if (GET_MODE (a) != GET_MODE (b))
261 return 0;
f82a103d 262 if (GET_MODE (a) == VOIDmode)
263 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
264 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
265 else
266 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
267 CONST_DOUBLE_REAL_VALUE (b));
73f5c1e3 268}
269
c6259b83 270/* Returns a hash code for X (which is a really a mem_attrs *). */
271
272static hashval_t
273mem_attrs_htab_hash (x)
274 const void *x;
275{
276 mem_attrs *p = (mem_attrs *) x;
277
278 return (p->alias ^ (p->align * 1000)
279 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
280 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
b10dbbca 281 ^ (size_t) p->expr);
c6259b83 282}
283
6ef828f9 284/* Returns nonzero if the value represented by X (which is really a
c6259b83 285 mem_attrs *) is the same as that given by Y (which is also really a
286 mem_attrs *). */
73f5c1e3 287
288static int
c6259b83 289mem_attrs_htab_eq (x, y)
290 const void *x;
291 const void *y;
73f5c1e3 292{
c6259b83 293 mem_attrs *p = (mem_attrs *) x;
294 mem_attrs *q = (mem_attrs *) y;
295
b10dbbca 296 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
c6259b83 297 && p->size == q->size && p->align == q->align);
73f5c1e3 298}
299
c6259b83 300/* Allocate a new mem_attrs structure and insert it into the hash table if
5cc193e7 301 one identical to it is not already in the table. We are doing this for
302 MEM of mode MODE. */
c6259b83 303
304static mem_attrs *
b10dbbca 305get_mem_attrs (alias, expr, offset, size, align, mode)
c6259b83 306 HOST_WIDE_INT alias;
b10dbbca 307 tree expr;
c6259b83 308 rtx offset;
309 rtx size;
310 unsigned int align;
5cc193e7 311 enum machine_mode mode;
c6259b83 312{
313 mem_attrs attrs;
314 void **slot;
315
d5c80165 316 /* If everything is the default, we can just return zero.
317 This must match what the corresponding MEM_* macros return when the
318 field is not present. */
b10dbbca 319 if (alias == 0 && expr == 0 && offset == 0
5cc193e7 320 && (size == 0
321 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
d5c80165 322 && (STRICT_ALIGNMENT && mode != BLKmode
323 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
5cc193e7 324 return 0;
325
c6259b83 326 attrs.alias = alias;
b10dbbca 327 attrs.expr = expr;
c6259b83 328 attrs.offset = offset;
329 attrs.size = size;
330 attrs.align = align;
331
332 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
333 if (*slot == 0)
334 {
335 *slot = ggc_alloc (sizeof (mem_attrs));
336 memcpy (*slot, &attrs, sizeof (mem_attrs));
337 }
338
339 return *slot;
73f5c1e3 340}
341
ca74b940 342/* Returns a hash code for X (which is a really a reg_attrs *). */
343
344static hashval_t
345reg_attrs_htab_hash (x)
346 const void *x;
347{
348 reg_attrs *p = (reg_attrs *) x;
349
350 return ((p->offset * 1000) ^ (long) p->decl);
351}
352
7ef5b942 353/* Returns nonzero if the value represented by X (which is really a
ca74b940 354 reg_attrs *) is the same as that given by Y (which is also really a
355 reg_attrs *). */
356
357static int
358reg_attrs_htab_eq (x, y)
359 const void *x;
360 const void *y;
361{
362 reg_attrs *p = (reg_attrs *) x;
363 reg_attrs *q = (reg_attrs *) y;
364
365 return (p->decl == q->decl && p->offset == q->offset);
366}
367/* Allocate a new reg_attrs structure and insert it into the hash table if
368 one identical to it is not already in the table. We are doing this for
369 MEM of mode MODE. */
370
371static reg_attrs *
372get_reg_attrs (decl, offset)
373 tree decl;
374 int offset;
375{
376 reg_attrs attrs;
377 void **slot;
378
379 /* If everything is the default, we can just return zero. */
380 if (decl == 0 && offset == 0)
381 return 0;
382
383 attrs.decl = decl;
384 attrs.offset = offset;
385
386 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
387 if (*slot == 0)
388 {
389 *slot = ggc_alloc (sizeof (reg_attrs));
390 memcpy (*slot, &attrs, sizeof (reg_attrs));
391 }
392
393 return *slot;
394}
395
22cf44bc 396/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
397 don't attempt to share with the various global pieces of rtl (such as
398 frame_pointer_rtx). */
399
400rtx
401gen_raw_REG (mode, regno)
402 enum machine_mode mode;
403 int regno;
404{
405 rtx x = gen_rtx_raw_REG (mode, regno);
406 ORIGINAL_REGNO (x) = regno;
407 return x;
408}
409
7014838c 410/* There are some RTL codes that require special attention; the generation
411 functions do the raw handling. If you add to this list, modify
412 special_rtx in gengenrtl.c as well. */
413
3ad7bb1c 414rtx
415gen_rtx_CONST_INT (mode, arg)
73f5c1e3 416 enum machine_mode mode ATTRIBUTE_UNUSED;
3ad7bb1c 417 HOST_WIDE_INT arg;
418{
73f5c1e3 419 void **slot;
420
3ad7bb1c 421 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
57c097d5 422 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3ad7bb1c 423
424#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
425 if (const_true_rtx && arg == STORE_FLAG_VALUE)
426 return const_true_rtx;
427#endif
428
73f5c1e3 429 /* Look up the CONST_INT in the hash table. */
2b3dbc20 430 slot = htab_find_slot_with_hash (const_int_htab, &arg,
431 (hashval_t) arg, INSERT);
7f2875d3 432 if (*slot == 0)
d7c47c0e 433 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
73f5c1e3 434
435 return (rtx) *slot;
3ad7bb1c 436}
437
2d232d05 438rtx
439gen_int_mode (c, mode)
440 HOST_WIDE_INT c;
441 enum machine_mode mode;
442{
443 return GEN_INT (trunc_int_for_mode (c, mode));
444}
445
2ff23ed0 446/* CONST_DOUBLEs might be created from pairs of integers, or from
447 REAL_VALUE_TYPEs. Also, their length is known only at run time,
448 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
449
450/* Determine whether REAL, a CONST_DOUBLE, already exists in the
451 hash table. If so, return its counterpart; otherwise add it
452 to the hash table and return it. */
453static rtx
454lookup_const_double (real)
455 rtx real;
456{
457 void **slot = htab_find_slot (const_double_htab, real, INSERT);
458 if (*slot == 0)
459 *slot = real;
460
461 return (rtx) *slot;
462}
7f2875d3 463
2ff23ed0 464/* Return a CONST_DOUBLE rtx for a floating-point value specified by
465 VALUE in mode MODE. */
67f2a2eb 466rtx
2ff23ed0 467const_double_from_real_value (value, mode)
468 REAL_VALUE_TYPE value;
67f2a2eb 469 enum machine_mode mode;
67f2a2eb 470{
2ff23ed0 471 rtx real = rtx_alloc (CONST_DOUBLE);
472 PUT_MODE (real, mode);
473
474 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
475
476 return lookup_const_double (real);
477}
478
479/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
480 of ints: I0 is the low-order word and I1 is the high-order word.
481 Do not use this routine for non-integer modes; convert to
482 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
483
484rtx
485immed_double_const (i0, i1, mode)
486 HOST_WIDE_INT i0, i1;
487 enum machine_mode mode;
488{
489 rtx value;
490 unsigned int i;
491
492 if (mode != VOIDmode)
493 {
494 int width;
495 if (GET_MODE_CLASS (mode) != MODE_INT
ead34f59 496 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
497 /* We can get a 0 for an error mark. */
498 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
499 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
2ff23ed0 500 abort ();
501
502 /* We clear out all bits that don't belong in MODE, unless they and
503 our sign bit are all one. So we get either a reasonable negative
504 value or a reasonable unsigned value for this mode. */
505 width = GET_MODE_BITSIZE (mode);
506 if (width < HOST_BITS_PER_WIDE_INT
507 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
508 != ((HOST_WIDE_INT) (-1) << (width - 1))))
509 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
510 else if (width == HOST_BITS_PER_WIDE_INT
511 && ! (i1 == ~0 && i0 < 0))
512 i1 = 0;
513 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
514 /* We cannot represent this value as a constant. */
515 abort ();
516
517 /* If this would be an entire word for the target, but is not for
518 the host, then sign-extend on the host so that the number will
519 look the same way on the host that it would on the target.
520
521 For example, when building a 64 bit alpha hosted 32 bit sparc
522 targeted compiler, then we want the 32 bit unsigned value -1 to be
523 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
524 The latter confuses the sparc backend. */
525
526 if (width < HOST_BITS_PER_WIDE_INT
527 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
528 i0 |= ((HOST_WIDE_INT) (-1) << width);
4e929432 529
2ff23ed0 530 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
531 CONST_INT.
4e929432 532
2ff23ed0 533 ??? Strictly speaking, this is wrong if we create a CONST_INT for
534 a large unsigned constant with the size of MODE being
535 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
536 in a wider mode. In that case we will mis-interpret it as a
537 negative number.
4e929432 538
2ff23ed0 539 Unfortunately, the only alternative is to make a CONST_DOUBLE for
540 any constant in any mode if it is an unsigned constant larger
541 than the maximum signed integer in an int on the host. However,
542 doing this will break everyone that always expects to see a
543 CONST_INT for SImode and smaller.
544
545 We have always been making CONST_INTs in this case, so nothing
546 new is being broken. */
547
548 if (width <= HOST_BITS_PER_WIDE_INT)
549 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
550 }
551
552 /* If this integer fits in one word, return a CONST_INT. */
553 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
554 return GEN_INT (i0);
555
556 /* We use VOIDmode for integers. */
557 value = rtx_alloc (CONST_DOUBLE);
558 PUT_MODE (value, VOIDmode);
559
560 CONST_DOUBLE_LOW (value) = i0;
561 CONST_DOUBLE_HIGH (value) = i1;
562
563 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
564 XWINT (value, i) = 0;
565
566 return lookup_const_double (value);
67f2a2eb 567}
568
3ad7bb1c 569rtx
570gen_rtx_REG (mode, regno)
571 enum machine_mode mode;
2ff23ed0 572 unsigned int regno;
3ad7bb1c 573{
574 /* In case the MD file explicitly references the frame pointer, have
575 all such references point to the same frame pointer. This is
576 used during frame pointer elimination to distinguish the explicit
577 references to these registers from pseudos that happened to be
578 assigned to them.
579
580 If we have eliminated the frame pointer or arg pointer, we will
581 be using it as a normal register, for example as a spill
582 register. In such cases, we might be accessing it in a mode that
583 is not Pmode and therefore cannot use the pre-allocated rtx.
584
585 Also don't do this when we are making new REGs in reload, since
586 we don't want to get confused with the real pointers. */
587
588 if (mode == Pmode && !reload_in_progress)
589 {
71801afc 590 if (regno == FRAME_POINTER_REGNUM
591 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 592 return frame_pointer_rtx;
593#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
71801afc 594 if (regno == HARD_FRAME_POINTER_REGNUM
595 && (!reload_completed || frame_pointer_needed))
3ad7bb1c 596 return hard_frame_pointer_rtx;
597#endif
598#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
e8b59353 599 if (regno == ARG_POINTER_REGNUM)
3ad7bb1c 600 return arg_pointer_rtx;
601#endif
602#ifdef RETURN_ADDRESS_POINTER_REGNUM
e8b59353 603 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3ad7bb1c 604 return return_address_pointer_rtx;
605#endif
3473aefe 606 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
6ea47475 607 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
d4c5e26d 608 return pic_offset_table_rtx;
e8b59353 609 if (regno == STACK_POINTER_REGNUM)
3ad7bb1c 610 return stack_pointer_rtx;
611 }
612
32b53d83 613#if 0
90295bd2 614 /* If the per-function register table has been set up, try to re-use
32b53d83 615 an existing entry in that table to avoid useless generation of RTL.
616
617 This code is disabled for now until we can fix the various backends
618 which depend on having non-shared hard registers in some cases. Long
619 term we want to re-enable this code as it can significantly cut down
71801afc 620 on the amount of useless RTL that gets generated.
621
622 We'll also need to fix some code that runs after reload that wants to
623 set ORIGINAL_REGNO. */
624
90295bd2 625 if (cfun
626 && cfun->emit
627 && regno_reg_rtx
628 && regno < FIRST_PSEUDO_REGISTER
629 && reg_raw_mode[regno] == mode)
630 return regno_reg_rtx[regno];
32b53d83 631#endif
90295bd2 632
22cf44bc 633 return gen_raw_REG (mode, regno);
3ad7bb1c 634}
635
b5ba9f3a 636rtx
637gen_rtx_MEM (mode, addr)
638 enum machine_mode mode;
639 rtx addr;
640{
641 rtx rt = gen_rtx_raw_MEM (mode, addr);
642
643 /* This field is not cleared by the mere allocation of the rtx, so
644 we clear it here. */
c6259b83 645 MEM_ATTRS (rt) = 0;
b5ba9f3a 646
647 return rt;
648}
701e46d0 649
650rtx
651gen_rtx_SUBREG (mode, reg, offset)
652 enum machine_mode mode;
653 rtx reg;
654 int offset;
655{
656 /* This is the most common failure type.
657 Catch it early so we can see who does it. */
658 if ((offset % GET_MODE_SIZE (mode)) != 0)
659 abort ();
660
661 /* This check isn't usable right now because combine will
662 throw arbitrary crap like a CALL into a SUBREG in
663 gen_lowpart_for_combine so we must just eat it. */
664#if 0
665 /* Check for this too. */
666 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
667 abort ();
668#endif
2ff23ed0 669 return gen_rtx_raw_SUBREG (mode, reg, offset);
701e46d0 670}
671
c6259b83 672/* Generate a SUBREG representing the least-significant part of REG if MODE
673 is smaller than mode of REG, otherwise paradoxical SUBREG. */
674
701e46d0 675rtx
676gen_lowpart_SUBREG (mode, reg)
677 enum machine_mode mode;
678 rtx reg;
679{
680 enum machine_mode inmode;
701e46d0 681
682 inmode = GET_MODE (reg);
683 if (inmode == VOIDmode)
684 inmode = mode;
81802af6 685 return gen_rtx_SUBREG (mode, reg,
686 subreg_lowpart_offset (mode, inmode));
701e46d0 687}
7014838c 688\f
15bbde2b 689/* rtx gen_rtx (code, mode, [element1, ..., elementn])
690**
691** This routine generates an RTX of the size specified by
692** <code>, which is an RTX code. The RTX structure is initialized
693** from the arguments <element1> through <elementn>, which are
694** interpreted according to the specific RTX type's format. The
695** special machine mode associated with the rtx (if any) is specified
696** in <mode>.
697**
fc92fa61 698** gen_rtx can be invoked in a way which resembles the lisp-like
15bbde2b 699** rtx it will generate. For example, the following rtx structure:
700**
701** (plus:QI (mem:QI (reg:SI 1))
702** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
703**
704** ...would be generated by the following C code:
705**
d823ba47 706** gen_rtx (PLUS, QImode,
15bbde2b 707** gen_rtx (MEM, QImode,
708** gen_rtx (REG, SImode, 1)),
709** gen_rtx (MEM, QImode,
710** gen_rtx (PLUS, SImode,
711** gen_rtx (REG, SImode, 2),
712** gen_rtx (REG, SImode, 3)))),
713*/
714
715/*VARARGS2*/
716rtx
ee582a61 717gen_rtx (enum rtx_code code, enum machine_mode mode, ...)
15bbde2b 718{
19cb6b50 719 int i; /* Array indices... */
720 const char *fmt; /* Current rtx's format... */
721 rtx rt_val; /* RTX to return to caller... */
ee582a61 722 va_list p;
15bbde2b 723
ee582a61 724 va_start (p, mode);
15bbde2b 725
67f2a2eb 726 switch (code)
15bbde2b 727 {
67f2a2eb 728 case CONST_INT:
729 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
730 break;
731
732 case CONST_DOUBLE:
733 {
ebde4c17 734 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
67f2a2eb 735 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
ebde4c17 736
ff385626 737 rt_val = immed_double_const (arg0, arg1, mode);
67f2a2eb 738 }
739 break;
740
741 case REG:
742 rt_val = gen_rtx_REG (mode, va_arg (p, int));
743 break;
744
745 case MEM:
746 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
747 break;
748
749 default:
15bbde2b 750 rt_val = rtx_alloc (code); /* Allocate the storage space. */
751 rt_val->mode = mode; /* Store the machine mode... */
752
753 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
754 for (i = 0; i < GET_RTX_LENGTH (code); i++)
755 {
756 switch (*fmt++)
757 {
6259dc05 758 case '0': /* Field with unknown use. Zero it. */
dbc02d14 759 X0EXP (rt_val, i) = NULL_RTX;
15bbde2b 760 break;
761
762 case 'i': /* An integer? */
763 XINT (rt_val, i) = va_arg (p, int);
764 break;
765
b572011e 766 case 'w': /* A wide integer? */
767 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
768 break;
769
15bbde2b 770 case 's': /* A string? */
771 XSTR (rt_val, i) = va_arg (p, char *);
772 break;
773
774 case 'e': /* An expression? */
775 case 'u': /* An insn? Same except when printing. */
776 XEXP (rt_val, i) = va_arg (p, rtx);
777 break;
778
779 case 'E': /* An RTX vector? */
780 XVEC (rt_val, i) = va_arg (p, rtvec);
781 break;
782
a3426c4c 783 case 'b': /* A bitmap? */
784 XBITMAP (rt_val, i) = va_arg (p, bitmap);
785 break;
786
787 case 't': /* A tree? */
788 XTREE (rt_val, i) = va_arg (p, tree);
789 break;
790
15bbde2b 791 default:
fc92fa61 792 abort ();
15bbde2b 793 }
794 }
67f2a2eb 795 break;
15bbde2b 796 }
67f2a2eb 797
ee582a61 798 va_end (p);
67f2a2eb 799 return rt_val;
15bbde2b 800}
801
802/* gen_rtvec (n, [rt1, ..., rtn])
803**
804** This routine creates an rtvec and stores within it the
805** pointers to rtx's which are its arguments.
806*/
807
808/*VARARGS1*/
809rtvec
ee582a61 810gen_rtvec (int n, ...)
15bbde2b 811{
7ad77798 812 int i, save_n;
15bbde2b 813 rtx *vector;
ee582a61 814 va_list p;
15bbde2b 815
ee582a61 816 va_start (p, n);
15bbde2b 817
818 if (n == 0)
819 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
820
821 vector = (rtx *) alloca (n * sizeof (rtx));
e5fcd76a 822
15bbde2b 823 for (i = 0; i < n; i++)
824 vector[i] = va_arg (p, rtx);
7ad77798 825
826 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
827 save_n = n;
ee582a61 828 va_end (p);
15bbde2b 829
7ad77798 830 return gen_rtvec_v (save_n, vector);
15bbde2b 831}
832
833rtvec
834gen_rtvec_v (n, argp)
835 int n;
836 rtx *argp;
837{
19cb6b50 838 int i;
839 rtvec rt_val;
15bbde2b 840
841 if (n == 0)
842 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
843
844 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
845
846 for (i = 0; i < n; i++)
a4070a91 847 rt_val->elem[i] = *argp++;
15bbde2b 848
849 return rt_val;
850}
851\f
852/* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
854
855rtx
856gen_reg_rtx (mode)
857 enum machine_mode mode;
858{
08513b52 859 struct function *f = cfun;
19cb6b50 860 rtx val;
15bbde2b 861
9e519b97 862 /* Don't let anything called after initial flow analysis create new
863 registers. */
864 if (no_new_pseudos)
15bbde2b 865 abort ();
866
316bc009 867 if (generating_concat_p
868 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
869 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
76c37538 870 {
871 /* For complex modes, don't make a single pseudo.
872 Instead, make a CONCAT of two pseudos.
873 This allows noncontiguous allocation of the real and imaginary parts,
874 which makes much better code. Besides, allocating DCmode
875 pseudos overstrains reload on some machines like the 386. */
876 rtx realpart, imagpart;
e9e12845 877 enum machine_mode partmode = GET_MODE_INNER (mode);
76c37538 878
879 realpart = gen_reg_rtx (partmode);
880 imagpart = gen_reg_rtx (partmode);
3ad7bb1c 881 return gen_rtx_CONCAT (mode, realpart, imagpart);
76c37538 882 }
883
ca74b940 884 /* Make sure regno_pointer_align, and regno_reg_rtx are large
fcdc122e 885 enough to have an element for this pseudo reg number. */
15bbde2b 886
e61a0a7f 887 if (reg_rtx_no == f->emit->regno_pointer_align_length)
15bbde2b 888 {
e61a0a7f 889 int old_size = f->emit->regno_pointer_align_length;
26df1c5e 890 char *new;
fcdc122e 891 rtx *new1;
fcdc122e 892
1f3233d1 893 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
0a893c29 894 memset (new + old_size, 0, old_size);
4491f79f 895 f->emit->regno_pointer_align = (unsigned char *) new;
0a893c29 896
1f3233d1 897 new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
898 old_size * 2 * sizeof (rtx));
0a893c29 899 memset (new1 + old_size, 0, old_size * sizeof (rtx));
15bbde2b 900 regno_reg_rtx = new1;
901
e61a0a7f 902 f->emit->regno_pointer_align_length = old_size * 2;
15bbde2b 903 }
904
22cf44bc 905 val = gen_raw_REG (mode, reg_rtx_no);
15bbde2b 906 regno_reg_rtx[reg_rtx_no++] = val;
907 return val;
908}
909
ca74b940 910/* Generate an register with same attributes as REG,
911 but offsetted by OFFSET. */
912
913rtx
914gen_rtx_REG_offset (reg, mode, regno, offset)
915 enum machine_mode mode;
916 unsigned int regno;
917 int offset;
918 rtx reg;
919{
920 rtx new = gen_rtx_REG (mode, regno);
921 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
922 REG_OFFSET (reg) + offset);
923 return new;
924}
925
926/* Set the decl for MEM to DECL. */
927
928void
929set_reg_attrs_from_mem (reg, mem)
930 rtx reg;
931 rtx mem;
932{
933 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
934 REG_ATTRS (reg)
935 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
936}
937
263c416c 938/* Set the register attributes for registers contained in PARM_RTX.
939 Use needed values from memory attributes of MEM. */
940
941void
942set_reg_attrs_for_parm (parm_rtx, mem)
943 rtx parm_rtx;
944 rtx mem;
945{
946 if (GET_CODE (parm_rtx) == REG)
947 set_reg_attrs_from_mem (parm_rtx, mem);
948 else if (GET_CODE (parm_rtx) == PARALLEL)
949 {
950 /* Check for a NULL entry in the first slot, used to indicate that the
951 parameter goes both on the stack and in registers. */
952 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
953 for (; i < XVECLEN (parm_rtx, 0); i++)
954 {
955 rtx x = XVECEXP (parm_rtx, 0, i);
956 if (GET_CODE (XEXP (x, 0)) == REG)
957 REG_ATTRS (XEXP (x, 0))
958 = get_reg_attrs (MEM_EXPR (mem),
959 INTVAL (XEXP (x, 1)));
960 }
961 }
962}
963
/* Assign the RTX X to declaration T, and for registers record the
   reverse mapping from X back to T via REG_ATTRS.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    /* The decl starts SUBREG_BYTE bytes before the subreg's value.  */
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      /* Real part at offset 0, imaginary part one unit further on.  */
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  /* Each entry pairs a register with its constant offset.  */
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
999
de8ecfb5 1000/* Identify REG (which may be a CONCAT) as a user register. */
1001
1002void
1003mark_user_reg (reg)
1004 rtx reg;
1005{
1006 if (GET_CODE (reg) == CONCAT)
1007 {
1008 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1009 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1010 }
1011 else if (GET_CODE (reg) == REG)
1012 REG_USERVAR_P (reg) = 1;
1013 else
1014 abort ();
1015}
1016
d4c332ff 1017/* Identify REG as a probable pointer register and show its alignment
1018 as ALIGN, if nonzero. */
15bbde2b 1019
1020void
d4c332ff 1021mark_reg_pointer (reg, align)
15bbde2b 1022 rtx reg;
d4c332ff 1023 int align;
15bbde2b 1024{
e61a0a7f 1025 if (! REG_POINTER (reg))
612409a6 1026 {
e61a0a7f 1027 REG_POINTER (reg) = 1;
d4c332ff 1028
612409a6 1029 if (align)
1030 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1031 }
1032 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1033 /* We can no-longer be sure just how aligned this pointer is */
d4c332ff 1034 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
15bbde2b 1035}
1036
/* Return 1 plus largest pseudo reg number used in the current function.
   reg_rtx_no is the next register number gen_reg_rtx would hand out.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}
1044
1045/* Return 1 + the largest label number used so far in the current function. */
1046
1047int
1048max_label_num ()
1049{
1050 if (last_label_num && label_num == base_label_num)
1051 return last_label_num;
1052 return label_num;
1053}
1054
/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
1062\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  If CHECK_MODE is nonzero, also verify that the inner
   register's mode is valid for its hard register number.  Aborts on
   any malformed subreg rather than returning an error.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
				      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  The byte offset must be a
     multiple of the outer mode's size.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
1098
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  /* Nothing to do if X already has the requested mode.  */
  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
      /* Otherwise fall through to the final "can't do this" return.  */
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  /* Reuse X when truncation did not change the value.  */
	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32-bits and double-precision
     floats are always 64-bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      /* Reinterpret the integer bits as a 32-bit float image.  */
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      /* Reinterpret a 64-bit integer constant as a double image.  */
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      /* The reverse direction: extract the integer bit image of a
	 floating-point constant.  */
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }
  /* If MODE is a condition code and X is a CONST_INT, the value of X
     must already have been "recognized" by the back-end, and we can
     assume that it is valid for this mode.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && GET_CODE (x) == CONST_INT)
    return x;

  /* Otherwise, we can't do this.  */
  return 0;
}
1301\f
591356e8 1302/* Return the constant real or imaginary part (which has mode MODE)
1303 of a complex value X. The IMAGPART_P argument determines whether
1304 the real or complex component should be returned. This function
1305 returns NULL_RTX if the component isn't a constant. */
1306
1307static rtx
1308gen_complex_constant_part (mode, x, imagpart_p)
1309 enum machine_mode mode;
1310 rtx x;
1311 int imagpart_p;
1312{
1313 tree decl, part;
1314
1315 if (GET_CODE (x) == MEM
e97f8822 1316 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
591356e8 1317 {
1318 decl = SYMBOL_REF_DECL (XEXP (x, 0));
1319 if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
1320 {
1321 part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
1322 if (TREE_CODE (part) == REAL_CST
1323 || TREE_CODE (part) == INTEGER_CST)
1324 return expand_expr (part, NULL_RTX, mode, 0);
1325 }
1326 }
1327 return NULL_RTX;
1328}
1329
568f439b 1330/* Return the real part (which has mode MODE) of a complex value X.
1331 This always comes at the low address in memory. */
1332
1333rtx
1334gen_realpart (mode, x)
1335 enum machine_mode mode;
19cb6b50 1336 rtx x;
568f439b 1337{
591356e8 1338 rtx part;
1339
1340 /* Handle complex constants. */
1341 part = gen_complex_constant_part (mode, x, 0);
1342 if (part != NULL_RTX)
1343 return part;
1344
81802af6 1345 if (WORDS_BIG_ENDIAN
1346 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1347 && REG_P (x)
1348 && REGNO (x) < FIRST_PSEUDO_REGISTER)
f060a027 1349 internal_error
68435912 1350 ("can't access real part of complex value in hard register");
0864dc1b 1351 else if (WORDS_BIG_ENDIAN)
568f439b 1352 return gen_highpart (mode, x);
1353 else
1354 return gen_lowpart (mode, x);
1355}
1356
1357/* Return the imaginary part (which has mode MODE) of a complex value X.
1358 This always comes at the high address in memory. */
1359
1360rtx
1361gen_imagpart (mode, x)
1362 enum machine_mode mode;
19cb6b50 1363 rtx x;
568f439b 1364{
591356e8 1365 rtx part;
1366
1367 /* Handle complex constants. */
1368 part = gen_complex_constant_part (mode, x, 1);
1369 if (part != NULL_RTX)
1370 return part;
1371
81802af6 1372 if (WORDS_BIG_ENDIAN)
568f439b 1373 return gen_lowpart (mode, x);
701e46d0 1374 else if (! WORDS_BIG_ENDIAN
ea9a92b6 1375 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1376 && REG_P (x)
1377 && REGNO (x) < FIRST_PSEUDO_REGISTER)
f060a027 1378 internal_error
1379 ("can't access imaginary part of complex value in hard register");
568f439b 1380 else
1381 return gen_highpart (mode, x);
1382}
48c70a46 1383
1384/* Return 1 iff X, assumed to be a SUBREG,
1385 refers to the real part of the complex value in its containing reg.
1386 Complex values are always stored with the real part in the first word,
1387 regardless of WORDS_BIG_ENDIAN. */
1388
1389int
1390subreg_realpart_p (x)
1391 rtx x;
1392{
1393 if (GET_CODE (x) != SUBREG)
1394 abort ();
1395
701e46d0 1396 return ((unsigned int) SUBREG_BYTE (x)
02e7a332 1397 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
48c70a46 1398}
568f439b 1399\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.
   Aborts if no low part can be produced at all.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  Copy it to a
	 pseudo first, then take the low part of the copy.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      /* Otherwise compute the byte offset of the low part within the
	 MEM, respecting word and byte endianness separately.  */
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}
1452
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.
   Aborts if MODE is invalid for X or no high part can be built.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
704fcf2b 1483
29d56731 1484/* Like gen_highpart, but accept mode of EXP operand in case EXP can
704fcf2b 1485 be VOIDmode constant. */
1486rtx
1487gen_highpart_mode (outermode, innermode, exp)
d4c5e26d 1488 enum machine_mode outermode, innermode;
1489 rtx exp;
704fcf2b 1490{
1491 if (GET_MODE (exp) != VOIDmode)
1492 {
1493 if (GET_MODE (exp) != innermode)
1494 abort ();
1495 return gen_highpart (outermode, exp);
1496 }
1497 return simplify_gen_subreg (outermode, exp, innermode,
1498 subreg_highpart_offset (outermode, innermode));
1499}
d4c5e26d 1500
81802af6 1501/* Return offset in bytes to get OUTERMODE low part
1502 of the value in mode INNERMODE stored in memory in target format. */
10ef59ac 1503
81802af6 1504unsigned int
1505subreg_lowpart_offset (outermode, innermode)
1506 enum machine_mode outermode, innermode;
1507{
1508 unsigned int offset = 0;
1509 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
10ef59ac 1510
81802af6 1511 if (difference > 0)
d56d0ca2 1512 {
81802af6 1513 if (WORDS_BIG_ENDIAN)
1514 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1515 if (BYTES_BIG_ENDIAN)
1516 offset += difference % UNITS_PER_WORD;
d56d0ca2 1517 }
701e46d0 1518
81802af6 1519 return offset;
d56d0ca2 1520}
64ab453f 1521
81802af6 1522/* Return offset in bytes to get OUTERMODE high part
1523 of the value in mode INNERMODE stored in memory in target format. */
1524unsigned int
1525subreg_highpart_offset (outermode, innermode)
64ab453f 1526 enum machine_mode outermode, innermode;
64ab453f 1527{
1528 unsigned int offset = 0;
1529 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1530
81802af6 1531 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
d4c5e26d 1532 abort ();
81802af6 1533
64ab453f 1534 if (difference > 0)
1535 {
81802af6 1536 if (! WORDS_BIG_ENDIAN)
64ab453f 1537 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
81802af6 1538 if (! BYTES_BIG_ENDIAN)
64ab453f 1539 offset += difference % UNITS_PER_WORD;
1540 }
1541
81802af6 1542 return offset;
64ab453f 1543}
d56d0ca2 1544
15bbde2b 1545/* Return 1 iff X, assumed to be a SUBREG,
1546 refers to the least significant part of its containing reg.
1547 If X is not a SUBREG, always return 1 (it is its own low part!). */
1548
1549int
1550subreg_lowpart_p (x)
1551 rtx x;
1552{
1553 if (GET_CODE (x) != SUBREG)
1554 return 1;
7e14c1bf 1555 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1556 return 0;
15bbde2b 1557
81802af6 1558 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1559 == SUBREG_BYTE (x));
15bbde2b 1560}
1561\f
15bbde2b 1562
/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  OP is the constant, OFFSET the
   requested word index (0 = lowest-addressed word), and MODE the mode
   of OP.  Returns the requested word as a CONST_INT, or 0 if the case
   cannot be handled.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT. A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  /* Assemble both 32-bit halves into one 64-bit word.  */
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      /* Extended (> 64-bit) floats: up to four 32-bit pieces.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}
1720
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* A mode must be known, either supplied or carried by OP.  */
  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  /* After reload we may not create new pseudos; the address
	     must already be strictly valid or we fail.  */
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
1789
15bbde2b 1790/* Similar to `operand_subword', but never return 0. If we can't extract
1791 the required subword, put OP into a register and try again. If that fails,
d823ba47 1792 abort. We always validate the address in this case.
15bbde2b 1793
1794 MODE is the mode of OP, in case it is CONST_INT. */
1795
1796rtx
701e46d0 1797operand_subword_force (op, offset, mode)
15bbde2b 1798 rtx op;
701e46d0 1799 unsigned int offset;
15bbde2b 1800 enum machine_mode mode;
1801{
701e46d0 1802 rtx result = operand_subword (op, offset, 1, mode);
15bbde2b 1803
1804 if (result)
1805 return result;
1806
1807 if (mode != BLKmode && mode != VOIDmode)
ac825d29 1808 {
1809 /* If this is a register which can not be accessed by words, copy it
1810 to a pseudo register. */
1811 if (GET_CODE (op) == REG)
1812 op = copy_to_reg (op);
1813 else
1814 op = force_reg (mode, op);
1815 }
15bbde2b 1816
701e46d0 1817 result = operand_subword (op, offset, 1, mode);
15bbde2b 1818 if (result == 0)
1819 abort ();
1820
1821 return result;
1822}
1823\f
1824/* Given a compare instruction, swap the operands.
1825 A test instruction is changed into a compare of 0 against the operand. */
1826
1827void
1828reverse_comparison (insn)
1829 rtx insn;
1830{
1831 rtx body = PATTERN (insn);
1832 rtx comp;
1833
1834 if (GET_CODE (body) == SET)
1835 comp = SET_SRC (body);
1836 else
1837 comp = SET_SRC (XVECEXP (body, 0, 0));
1838
1839 if (GET_CODE (comp) == COMPARE)
1840 {
1841 rtx op0 = XEXP (comp, 0);
1842 rtx op1 = XEXP (comp, 1);
1843 XEXP (comp, 0) = op1;
1844 XEXP (comp, 1) = op0;
1845 }
1846 else
1847 {
7014838c 1848 rtx new = gen_rtx_COMPARE (VOIDmode,
1849 CONST0_RTX (GET_MODE (comp)), comp);
15bbde2b 1850 if (GET_CODE (body) == SET)
1851 SET_SRC (body) = new;
1852 else
1853 SET_SRC (XVECEXP (body, 0, 0)) = new;
1854 }
1855}
1856\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  Returns REF itself when nothing needed stripping,
   otherwise a rebuilt COMPONENT_REF over the simplified base.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  /* Recurse through nested component refs so the whole chain is
     normalized.  */
  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      /* Anything that is not a decl becomes the NULL "variable base".  */
      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
c6259b83 1895
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
     rtx ref;
     tree t;
     int objectp;
     HOST_WIDE_INT bitpos;
{
  /* Start from REF's current attributes; we refine them below.  */
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;

	  /* Accumulate the byte offset of every nested ARRAY_REF level
	     into OFF_TREE while stripping the refs off T.  */
	  do
	    {
	      tree index = TREE_OPERAND (t, 1);
	      tree array = TREE_OPERAND (t, 0);
	      tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	      tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	      tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of the
		 array element.  */
	      if (low_bound != 0 && ! integer_zerop (low_bound))
		index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound));

	      /* If the index has a self-referential type, pass it to a
		 WITH_RECORD_EXPR; if the component size is, pass our
		 component to one.  */
	      if (CONTAINS_PLACEHOLDER_P (index))
		index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
	      if (CONTAINS_PLACEHOLDER_P (unit_size))
		unit_size = build (WITH_RECORD_EXPR, sizetype,
				   unit_size, array);

	      off_tree
		= fold (build (PLUS_EXPR, sizetype,
			       fold (build (MULT_EXPR, sizetype,
					    index,
					    unit_size)),
			       off_tree));
	      t = TREE_OPERAND (t, 0);
	    }
	  while (TREE_CODE (t) == ARRAY_REF);

	  if (DECL_P (t))
	    {
	      expr = t;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  /* AOFF = alignment guaranteed by the lowest set bit of
		     the constant byte offset.  */
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t);
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && TREE_CODE (t) == INDIRECT_REF
		   && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	    {
	      expr = t;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && TREE_CODE (t) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
2123
6f717f77 2124void
2125set_mem_attributes (ref, t, objectp)
2126 rtx ref;
2127 tree t;
2128 int objectp;
2129{
2130 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2131}
2132
ca74b940 2133/* Set the decl for MEM to DECL. */
2134
2135void
2136set_mem_attrs_from_reg (mem, reg)
2137 rtx mem;
2138 rtx reg;
2139{
2140 MEM_ATTRS (mem)
2141 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2142 GEN_INT (REG_OFFSET (reg)),
2143 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2144}
2145
c6259b83 2146/* Set the alias set of MEM to SET. */
2147
2148void
2149set_mem_alias_set (mem, set)
2150 rtx mem;
2151 HOST_WIDE_INT set;
2152{
d4c5e26d 2153#ifdef ENABLE_CHECKING
c6259b83 2154 /* If the new and old alias sets don't conflict, something is wrong. */
2155 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2156 abort ();
c6259b83 2157#endif
2158
b10dbbca 2159 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
5cc193e7 2160 MEM_SIZE (mem), MEM_ALIGN (mem),
2161 GET_MODE (mem));
c6259b83 2162}
96216d37 2163
1c4512da 2164/* Set the alignment of MEM to ALIGN bits. */
96216d37 2165
2166void
2167set_mem_align (mem, align)
2168 rtx mem;
2169 unsigned int align;
2170{
b10dbbca 2171 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
5cc193e7 2172 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2173 GET_MODE (mem));
96216d37 2174}
278fe152 2175
b10dbbca 2176/* Set the expr for MEM to EXPR. */
278fe152 2177
2178void
b10dbbca 2179set_mem_expr (mem, expr)
278fe152 2180 rtx mem;
b10dbbca 2181 tree expr;
278fe152 2182{
2183 MEM_ATTRS (mem)
b10dbbca 2184 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
278fe152 2185 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2186}
b10dbbca 2187
2188/* Set the offset of MEM to OFFSET. */
2189
2190void
2191set_mem_offset (mem, offset)
2192 rtx mem, offset;
2193{
2194 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2195 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2196 GET_MODE (mem));
f0500469 2197}
2198
2199/* Set the size of MEM to SIZE. */
2200
2201void
2202set_mem_size (mem, size)
2203 rtx mem, size;
2204{
2205 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2206 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2207 GET_MODE (mem));
b10dbbca 2208}
c6259b83 2209\f
96216d37 2210/* Return a memory reference like MEMREF, but with its mode changed to MODE
2211 and its address changed to ADDR. (VOIDmode means don't change the mode.
2212 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2213 returned memory location is required to be valid. The memory
2214 attributes are not changed. */
15bbde2b 2215
96216d37 2216static rtx
e4e86ec5 2217change_address_1 (memref, mode, addr, validate)
15bbde2b 2218 rtx memref;
2219 enum machine_mode mode;
2220 rtx addr;
e4e86ec5 2221 int validate;
15bbde2b 2222{
2223 rtx new;
2224
2225 if (GET_CODE (memref) != MEM)
2226 abort ();
2227 if (mode == VOIDmode)
2228 mode = GET_MODE (memref);
2229 if (addr == 0)
2230 addr = XEXP (memref, 0);
2231
e4e86ec5 2232 if (validate)
15bbde2b 2233 {
e4e86ec5 2234 if (reload_in_progress || reload_completed)
2235 {
2236 if (! memory_address_p (mode, addr))
2237 abort ();
2238 }
2239 else
2240 addr = memory_address (mode, addr);
15bbde2b 2241 }
d823ba47 2242
e8976cd7 2243 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2244 return memref;
2245
3ad7bb1c 2246 new = gen_rtx_MEM (mode, addr);
6a0934dd 2247 MEM_COPY_ATTRIBUTES (new, memref);
15bbde2b 2248 return new;
2249}
537ffcfc 2250
96216d37 2251/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2252 way we are changing MEMREF, so we only preserve the alias set. */
e513d163 2253
2254rtx
96216d37 2255change_address (memref, mode, addr)
e513d163 2256 rtx memref;
2257 enum machine_mode mode;
96216d37 2258 rtx addr;
e513d163 2259{
96216d37 2260 rtx new = change_address_1 (memref, mode, addr, 1);
2261 enum machine_mode mmode = GET_MODE (new);
6cc60c4d 2262
96216d37 2263 MEM_ATTRS (new)
2264 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2265 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2b96c5f6 2266 (mmode == BLKmode ? BITS_PER_UNIT
2267 : GET_MODE_ALIGNMENT (mmode)),
5cc193e7 2268 mmode);
fb257ae6 2269
96216d37 2270 return new;
e513d163 2271}
537ffcfc 2272
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (memref, mode, offset, validate, adjust)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
     int validate, adjust;
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
2339
bf42c62d 2340/* Return a memory reference like MEMREF, but with its mode changed
2341 to MODE and its address changed to ADDR, which is assumed to be
2342 MEMREF offseted by OFFSET bytes. If VALIDATE is
2343 nonzero, the memory address is forced to be valid. */
2344
2345rtx
2346adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2347 rtx memref;
2348 enum machine_mode mode;
2349 rtx addr;
2350 HOST_WIDE_INT offset;
2351 int validate;
2352{
2353 memref = change_address_1 (memref, VOIDmode, addr, validate);
2354 return adjust_address_1 (memref, mode, offset, validate, 0);
2355}
2356
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (memref, offset, pow2)
     rtx memref;
     rtx offset;
     unsigned HOST_WIDE_INT pow2;
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Preserve the PIC base by forcing the whole old address into a
	 register before re-adding the offset.  */
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     GET_MODE (new));
  return new;
}
d4c5e26d 2397
537ffcfc 2398/* Return a memory reference like MEMREF, but with its address changed to
2399 ADDR. The caller is asserting that the actual piece of memory pointed
2400 to is the same, just the form of the address is being changed, such as
2401 by putting something into a register. */
2402
2403rtx
2404replace_equiv_address (memref, addr)
2405 rtx memref;
2406 rtx addr;
2407{
96216d37 2408 /* change_address_1 copies the memory attribute structure without change
2409 and that's exactly what we want here. */
ecfe4ca9 2410 update_temp_slot_address (XEXP (memref, 0), addr);
96216d37 2411 return change_address_1 (memref, VOIDmode, addr, 1);
537ffcfc 2412}
96216d37 2413
e4e86ec5 2414/* Likewise, but the reference is not required to be valid. */
2415
2416rtx
2417replace_equiv_address_nv (memref, addr)
2418 rtx memref;
2419 rtx addr;
2420{
e4e86ec5 2421 return change_address_1 (memref, VOIDmode, addr, 0);
2422}
8259ab07 2423
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (memref, mode, offset)
     rtx memref;
     enum machine_mode mode;
     HOST_WIDE_INT offset;
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  /* Walk outward through the expression until we find a level that
     provably contains the whole widened access.  */
  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Step out one level: re-express MEMOFFSET relative to the
	     containing structure.  */
	  expr = TREE_OPERAND (expr, 0);
	  memoffset = (GEN_INT (INTVAL (memoffset)
		       + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		       + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			  / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				   MEM_ALIGN (new), mode);

  return new;
}
15bbde2b 2503\f
2504/* Return a newly created CODE_LABEL rtx with a unique label number. */
2505
2506rtx
2507gen_label_rtx ()
2508{
a7ae1e59 2509 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2510 NULL, label_num++, NULL);
15bbde2b 2511}
2512\f
2513/* For procedure integration. */
2514
15bbde2b 2515/* Install new pointers to the first and last insns in the chain.
d4c332ff 2516 Also, set cur_insn_uid to one higher than the last in use.
15bbde2b 2517 Used for an inline-procedure after copying the insn chain. */
2518
2519void
2520set_new_first_and_last_insn (first, last)
2521 rtx first, last;
2522{
d4c332ff 2523 rtx insn;
2524
15bbde2b 2525 first_insn = first;
2526 last_insn = last;
d4c332ff 2527 cur_insn_uid = 0;
2528
2529 for (insn = first; insn; insn = NEXT_INSN (insn))
2530 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2531
2532 cur_insn_uid++;
15bbde2b 2533}
2534
2535/* Set the range of label numbers found in the current function.
2536 This is used when belatedly compiling an inline function. */
2537
2538void
2539set_new_first_and_last_label_num (first, last)
2540 int first, last;
2541{
2542 base_label_num = label_num;
2543 first_label_num = first;
2544 last_label_num = last;
2545}
0a893c29 2546
2547/* Set the last label number found in the current function.
2548 This is used when belatedly compiling an inline function. */
15bbde2b 2549
2550void
0a893c29 2551set_new_last_label_num (last)
2552 int last;
15bbde2b 2553{
0a893c29 2554 base_label_num = label_num;
2555 last_label_num = last;
15bbde2b 2556}
0a893c29 2557\f
15bbde2b 2558/* Restore all variables describing the current status from the structure *P.
2559 This is used after a nested function. */
2560
2561void
2562restore_emit_status (p)
c5b89159 2563 struct function *p ATTRIBUTE_UNUSED;
15bbde2b 2564{
bb9d6298 2565 last_label_num = 0;
15bbde2b 2566}
2567\f
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

void
unshare_all_rtl (fndecl, insn)
     tree fndecl;
     rtx insn;
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_1 (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
2597
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (insn)
     rtx insn;
{
  rtx p;
  tree decl;

  /* First clear the used-flags on everything reachable from the insn
     chain, so copy_rtx_if_shared starts from a clean slate.  */
  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  /* With all mark bits clear, redo the unsharing pass.  */
  unshare_all_rtl (cfun->decl, insn);
}
2628
2629/* Go through all the RTL insn bodies and copy any invalid shared structure.
2630 Assumes the mark bits are cleared at entry. */
2631
2632static void
2633unshare_all_rtl_1 (insn)
2634 rtx insn;
2635{
2636 for (; insn; insn = NEXT_INSN (insn))
9204e736 2637 if (INSN_P (insn))
2d96a59a 2638 {
2639 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2640 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2641 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2642 }
2643}
2644
72ec8878 2645/* Go through all virtual stack slots of a function and copy any
2646 shared structure. */
2647static void
2648unshare_all_decls (blk)
2649 tree blk;
2650{
2651 tree t;
2652
2653 /* Copy shared decls. */
2654 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 2655 if (DECL_RTL_SET_P (t))
2656 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
72ec8878 2657
2658 /* Now process sub-blocks. */
2659 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2660 unshare_all_decls (t);
2661}
2662
01dc9f0c 2663/* Go through all virtual stack slots of a function and mark them as
6312a35e 2664 not shared. */
01dc9f0c 2665static void
2666reset_used_decls (blk)
2667 tree blk;
2668{
2669 tree t;
2670
2671 /* Mark decls. */
2672 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
0e8e37b2 2673 if (DECL_RTL_SET_P (t))
2674 reset_used_flags (DECL_RTL (t));
01dc9f0c 2675
2676 /* Now process sub-blocks. */
2677 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2678 reset_used_decls (t);
2679}
2680
/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
   placed in the result directly, rather than being copied.  MAY_SHARE is
   either a MEM or an EXPR_LIST of MEMs.  */

rtx
copy_most_rtx (orig, may_share)
     rtx orig;
     rtx may_share;
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  /* ORIG itself may be shared, either directly or as a member of the
     MAY_SHARE list.  */
  if (orig == may_share
      || (GET_CODE (may_share) == EXPR_LIST
	  && in_expr_list_p (may_share, orig)))
    return orig;

  code = GET_CODE (orig);

  /* These codes are always shareable, so return them uncopied.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    default:
      break;
    }

  /* Allocate a fresh rtx and copy the mode and the sticky flag bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
  RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
  RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand according to its format letter.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Recurse on sub-expressions, except the shareable one.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
	  break;

	case 'u':
	  /* Insn references are shared, not copied.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  /* Vectors get a fresh rtvec with recursively copied elements.  */
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'n':
	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
2783
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  */

rtx
copy_rtx_if_shared (orig)
     rtx orig;
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;
  int copied = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return x;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return x;
      break;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return x;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.

	 We used to allow sharing of MEMs which referenced
	 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
	 that can lose.  instantiate_virtual_regs will not unshare
	 the MEMs, and combine may change the structure of the address
	 because it looks safe and profitable in one context, but
	 in some other context it creates unrecognizable RTL.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	return x;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      rtx copy;

      /* Shallow-copy the whole rtx; operands are fixed up below.  */
      copy = rtx_alloc (code);
      memcpy (copy, x,
	      (sizeof (*copy) - sizeof (copy->fld)
	       + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
      x = copy;
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* If X was shallow-copied above, its rtvec is still the
		 original's; give X its own before recursing.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
	      for (j = 0; j < len; j++)
		XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
	    }
	  break;
	}
    }
  return x;
}
2901
2902/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2903 to look for shared sub-parts. */
2904
2905void
2906reset_used_flags (x)
2907 rtx x;
2908{
19cb6b50 2909 int i, j;
2910 enum rtx_code code;
2911 const char *format_ptr;
15bbde2b 2912
2913 if (x == 0)
2914 return;
2915
2916 code = GET_CODE (x);
2917
c3418f42 2918 /* These types may be freely shared so we needn't do any resetting
15bbde2b 2919 for them. */
2920
2921 switch (code)
2922 {
2923 case REG:
2924 case QUEUED:
2925 case CONST_INT:
2926 case CONST_DOUBLE:
886cfd4f 2927 case CONST_VECTOR:
15bbde2b 2928 case SYMBOL_REF:
2929 case CODE_LABEL:
2930 case PC:
2931 case CC0:
2932 return;
2933
2934 case INSN:
2935 case JUMP_INSN:
2936 case CALL_INSN:
2937 case NOTE:
2938 case LABEL_REF:
2939 case BARRIER:
2940 /* The chain of insns is not being copied. */
2941 return;
d823ba47 2942
0dbd1c74 2943 default:
2944 break;
15bbde2b 2945 }
2946
7c25cb91 2947 RTX_FLAG (x, used) = 0;
15bbde2b 2948
2949 format_ptr = GET_RTX_FORMAT (code);
2950 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2951 {
2952 switch (*format_ptr++)
2953 {
2954 case 'e':
2955 reset_used_flags (XEXP (x, i));
2956 break;
2957
2958 case 'E':
2959 for (j = 0; j < XVECLEN (x, i); j++)
2960 reset_used_flags (XVECEXP (x, i, j));
2961 break;
2962 }
2963 }
2964}
2965\f
2966/* Copy X if necessary so that it won't be altered by changes in OTHER.
2967 Return X or the rtx for the pseudo reg the value of X was copied into.
2968 OTHER must be valid as a SET_DEST. */
2969
2970rtx
2971make_safe_from (x, other)
2972 rtx x, other;
2973{
2974 while (1)
2975 switch (GET_CODE (other))
2976 {
2977 case SUBREG:
2978 other = SUBREG_REG (other);
2979 break;
2980 case STRICT_LOW_PART:
2981 case SIGN_EXTEND:
2982 case ZERO_EXTEND:
2983 other = XEXP (other, 0);
2984 break;
2985 default:
2986 goto done;
2987 }
2988 done:
2989 if ((GET_CODE (other) == MEM
2990 && ! CONSTANT_P (x)
2991 && GET_CODE (x) != REG
2992 && GET_CODE (x) != SUBREG)
2993 || (GET_CODE (other) == REG
2994 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2995 || reg_mentioned_p (other, x))))
2996 {
2997 rtx temp = gen_reg_rtx (GET_MODE (x));
2998 emit_move_insn (temp, x);
2999 return temp;
3000 }
3001 return x;
3002}
3003\f
3004/* Emission of insns (adding them to the doubly-linked list). */
3005
3006/* Return the first insn of the current sequence or current function. */
3007
3008rtx
3009get_insns ()
3010{
3011 return first_insn;
3012}
3013
fb20d6fa 3014/* Specify a new insn as the first in the chain. */
3015
3016void
3017set_first_insn (insn)
3018 rtx insn;
3019{
3020 if (PREV_INSN (insn) != 0)
3021 abort ();
3022 first_insn = insn;
3023}
3024
15bbde2b 3025/* Return the last insn emitted in current sequence or current function. */
3026
3027rtx
3028get_last_insn ()
3029{
3030 return last_insn;
3031}
3032
3033/* Specify a new insn as the last in the chain. */
3034
3035void
3036set_last_insn (insn)
3037 rtx insn;
3038{
3039 if (NEXT_INSN (insn) != 0)
3040 abort ();
3041 last_insn = insn;
3042}
3043
3044/* Return the last insn emitted, even if it is in a sequence now pushed. */
3045
3046rtx
3047get_last_insn_anywhere ()
3048{
3049 struct sequence_stack *stack;
3050 if (last_insn)
3051 return last_insn;
0a893c29 3052 for (stack = seq_stack; stack; stack = stack->next)
15bbde2b 3053 if (stack->last != 0)
3054 return stack->last;
3055 return 0;
3056}
3057
70545de4 3058/* Return the first nonnote insn emitted in current sequence or current
3059 function. This routine looks inside SEQUENCEs. */
3060
3061rtx
3062get_first_nonnote_insn ()
3063{
3064 rtx insn = first_insn;
3065
3066 while (insn)
3067 {
3068 insn = next_insn (insn);
3069 if (insn == 0 || GET_CODE (insn) != NOTE)
3070 break;
3071 }
3072
3073 return insn;
3074}
3075
3076/* Return the last nonnote insn emitted in current sequence or current
3077 function. This routine looks inside SEQUENCEs. */
3078
3079rtx
3080get_last_nonnote_insn ()
3081{
3082 rtx insn = last_insn;
3083
3084 while (insn)
3085 {
3086 insn = previous_insn (insn);
3087 if (insn == 0 || GET_CODE (insn) != NOTE)
3088 break;
3089 }
3090
3091 return insn;
3092}
3093
15bbde2b 3094/* Return a number larger than any instruction's uid in this function. */
3095
3096int
3097get_max_uid ()
3098{
3099 return cur_insn_uid;
3100}
90b89d2c 3101
214d02d0 3102/* Renumber instructions so that no instruction UIDs are wasted. */
3103
90b89d2c 3104void
214d02d0 3105renumber_insns (stream)
3106 FILE *stream;
90b89d2c 3107{
3108 rtx insn;
90b89d2c 3109
214d02d0 3110 /* If we're not supposed to renumber instructions, don't. */
3111 if (!flag_renumber_insns)
3112 return;
3113
90b89d2c 3114 /* If there aren't that many instructions, then it's not really
3115 worth renumbering them. */
214d02d0 3116 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
90b89d2c 3117 return;
3118
3119 cur_insn_uid = 1;
3120
3121 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
214d02d0 3122 {
3123 if (stream)
d823ba47 3124 fprintf (stream, "Renumbering insn %d to %d\n",
214d02d0 3125 INSN_UID (insn), cur_insn_uid);
3126 INSN_UID (insn) = cur_insn_uid++;
3127 }
90b89d2c 3128}
15bbde2b 3129\f
3130/* Return the next insn. If it is a SEQUENCE, return the first insn
3131 of the sequence. */
3132
3133rtx
3134next_insn (insn)
3135 rtx insn;
3136{
3137 if (insn)
3138 {
3139 insn = NEXT_INSN (insn);
3140 if (insn && GET_CODE (insn) == INSN
3141 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3142 insn = XVECEXP (PATTERN (insn), 0, 0);
3143 }
3144
3145 return insn;
3146}
3147
3148/* Return the previous insn. If it is a SEQUENCE, return the last insn
3149 of the sequence. */
3150
3151rtx
3152previous_insn (insn)
3153 rtx insn;
3154{
3155 if (insn)
3156 {
3157 insn = PREV_INSN (insn);
3158 if (insn && GET_CODE (insn) == INSN
3159 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3160 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3161 }
3162
3163 return insn;
3164}
3165
3166/* Return the next insn after INSN that is not a NOTE. This routine does not
3167 look inside SEQUENCEs. */
3168
3169rtx
3170next_nonnote_insn (insn)
3171 rtx insn;
3172{
3173 while (insn)
3174 {
3175 insn = NEXT_INSN (insn);
3176 if (insn == 0 || GET_CODE (insn) != NOTE)
3177 break;
3178 }
3179
3180 return insn;
3181}
3182
3183/* Return the previous insn before INSN that is not a NOTE. This routine does
3184 not look inside SEQUENCEs. */
3185
3186rtx
3187prev_nonnote_insn (insn)
3188 rtx insn;
3189{
3190 while (insn)
3191 {
3192 insn = PREV_INSN (insn);
3193 if (insn == 0 || GET_CODE (insn) != NOTE)
3194 break;
3195 }
3196
3197 return insn;
3198}
3199
3200/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3201 or 0, if there is none. This routine does not look inside
a92771b8 3202 SEQUENCEs. */
15bbde2b 3203
3204rtx
3205next_real_insn (insn)
3206 rtx insn;
3207{
3208 while (insn)
3209 {
3210 insn = NEXT_INSN (insn);
3211 if (insn == 0 || GET_CODE (insn) == INSN
3212 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3213 break;
3214 }
3215
3216 return insn;
3217}
3218
3219/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3220 or 0, if there is none. This routine does not look inside
3221 SEQUENCEs. */
3222
3223rtx
3224prev_real_insn (insn)
3225 rtx insn;
3226{
3227 while (insn)
3228 {
3229 insn = PREV_INSN (insn);
3230 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3231 || GET_CODE (insn) == JUMP_INSN)
3232 break;
3233 }
3234
3235 return insn;
3236}
3237
d5f9786f 3238/* Return the last CALL_INSN in the current list, or 0 if there is none.
3239 This routine does not look inside SEQUENCEs. */
3240
3241rtx
3242last_call_insn ()
3243{
3244 rtx insn;
3245
3246 for (insn = get_last_insn ();
3247 insn && GET_CODE (insn) != CALL_INSN;
3248 insn = PREV_INSN (insn))
3249 ;
3250
3251 return insn;
3252}
3253
15bbde2b 3254/* Find the next insn after INSN that really does something. This routine
3255 does not look inside SEQUENCEs. Until reload has completed, this is the
3256 same as next_real_insn. */
3257
2215ca0d 3258int
3259active_insn_p (insn)
3260 rtx insn;
3261{
3a66feab 3262 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3263 || (GET_CODE (insn) == INSN
3264 && (! reload_completed
3265 || (GET_CODE (PATTERN (insn)) != USE
3266 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2215ca0d 3267}
3268
15bbde2b 3269rtx
3270next_active_insn (insn)
3271 rtx insn;
3272{
3273 while (insn)
3274 {
3275 insn = NEXT_INSN (insn);
2215ca0d 3276 if (insn == 0 || active_insn_p (insn))
15bbde2b 3277 break;
3278 }
3279
3280 return insn;
3281}
3282
3283/* Find the last insn before INSN that really does something. This routine
3284 does not look inside SEQUENCEs. Until reload has completed, this is the
3285 same as prev_real_insn. */
3286
3287rtx
3288prev_active_insn (insn)
3289 rtx insn;
3290{
3291 while (insn)
3292 {
3293 insn = PREV_INSN (insn);
2215ca0d 3294 if (insn == 0 || active_insn_p (insn))
15bbde2b 3295 break;
3296 }
3297
3298 return insn;
3299}
3300
3301/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3302
3303rtx
3304next_label (insn)
3305 rtx insn;
3306{
3307 while (insn)
3308 {
3309 insn = NEXT_INSN (insn);
3310 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3311 break;
3312 }
3313
3314 return insn;
3315}
3316
3317/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3318
3319rtx
3320prev_label (insn)
3321 rtx insn;
3322{
3323 while (insn)
3324 {
3325 insn = PREV_INSN (insn);
3326 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3327 break;
3328 }
3329
3330 return insn;
3331}
3332\f
3333#ifdef HAVE_cc0
b15e0bba 3334/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3335 and REG_CC_USER notes so we can find it. */
3336
3337void
3338link_cc0_insns (insn)
3339 rtx insn;
3340{
3341 rtx user = next_nonnote_insn (insn);
3342
3343 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3344 user = XVECEXP (PATTERN (user), 0, 0);
3345
7014838c 3346 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3347 REG_NOTES (user));
3ad7bb1c 3348 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
b15e0bba 3349}
3350
15bbde2b 3351/* Return the next insn that uses CC0 after INSN, which is assumed to
3352 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3353 applied to the result of this function should yield INSN).
3354
3355 Normally, this is simply the next insn. However, if a REG_CC_USER note
3356 is present, it contains the insn that uses CC0.
3357
3358 Return 0 if we can't find the insn. */
3359
3360rtx
3361next_cc0_user (insn)
3362 rtx insn;
3363{
b572011e 3364 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
15bbde2b 3365
3366 if (note)
3367 return XEXP (note, 0);
3368
3369 insn = next_nonnote_insn (insn);
3370 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3371 insn = XVECEXP (PATTERN (insn), 0, 0);
3372
9204e736 3373 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
15bbde2b 3374 return insn;
3375
3376 return 0;
3377}
3378
3379/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3380 note, it is the previous insn. */
3381
3382rtx
3383prev_cc0_setter (insn)
3384 rtx insn;
3385{
b572011e 3386 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
15bbde2b 3387
3388 if (note)
3389 return XEXP (note, 0);
3390
3391 insn = prev_nonnote_insn (insn);
3392 if (! sets_cc0_p (PATTERN (insn)))
3393 abort ();
3394
3395 return insn;
3396}
3397#endif
344dc2fa 3398
3399/* Increment the label uses for all labels present in rtx. */
3400
3401static void
d4c5e26d 3402mark_label_nuses (x)
3403 rtx x;
344dc2fa 3404{
19cb6b50 3405 enum rtx_code code;
3406 int i, j;
3407 const char *fmt;
344dc2fa 3408
3409 code = GET_CODE (x);
3410 if (code == LABEL_REF)
3411 LABEL_NUSES (XEXP (x, 0))++;
3412
3413 fmt = GET_RTX_FORMAT (code);
3414 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3415 {
3416 if (fmt[i] == 'e')
ff385626 3417 mark_label_nuses (XEXP (x, i));
344dc2fa 3418 else if (fmt[i] == 'E')
ff385626 3419 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
344dc2fa 3420 mark_label_nuses (XVECEXP (x, i, j));
3421 }
3422}
3423
15bbde2b 3424\f
3425/* Try splitting insns that can be split for better scheduling.
3426 PAT is the pattern which might split.
3427 TRIAL is the insn providing PAT.
6ef828f9 3428 LAST is nonzero if we should return the last insn of the sequence produced.
15bbde2b 3429
3430 If this routine succeeds in splitting, it returns the first or last
0e69a50a 3431 replacement insn depending on the value of LAST. Otherwise, it
15bbde2b 3432 returns TRIAL. If the insn to be returned can be split, it will be. */
3433
rtx
try_split (pat, trial, last)
     rtx pat, trial;
     int last;
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx tem;
  rtx note, seq;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* If TRIAL is a conditional jump with a branch-probability note,
     export that probability to the splitters through the global
     split_branch_probability, then restore the global afterward.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && GET_CODE (after) == BARRIER)
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  /* No split pattern applied: return TRIAL unchanged.  */
  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* Mark labels referenced by the new jumps, walking the new insns
     from last to first.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      if (njumps != 1)
		abort ();
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_BR_PROB,
				     GEN_INT (probability),
				     REG_NOTES (insn));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (GET_CODE (trial) == CALL_INSN)
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    /* Append TRIAL's usage list at the tail of any usage the
	       new CALL_INSN already carries.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  /* EH region notes go on every insn in the split that can
	     throw: calls always, other insns only when non-call
	     exceptions are enabled and the pattern may trap.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == CALL_INSN
		  || (flag_non_call_exceptions
		      && may_trap_p (PATTERN (insn))))
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_ALWAYS_RETURN:
	  /* These call-related notes are copied to every new
	     CALL_INSN.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == CALL_INSN)
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* Non-local-goto notes are copied to every new JUMP_INSN.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == JUMP_INSN)
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (GET_CODE (trial) == INSN)
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  if (GET_CODE (insn) == INSN)
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  /* Emit the replacement insns where TRIAL was, inheriting its
     location, then remove TRIAL itself.  */
  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
3609\f
3610/* Make and return an INSN rtx, initializing all its slots.
6a84e367 3611 Store PATTERN in the pattern slots. */
15bbde2b 3612
3613rtx
6a84e367 3614make_insn_raw (pattern)
15bbde2b 3615 rtx pattern;
15bbde2b 3616{
19cb6b50 3617 rtx insn;
15bbde2b 3618
d7c47c0e 3619 insn = rtx_alloc (INSN);
15bbde2b 3620
575333f9 3621 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3622 PATTERN (insn) = pattern;
3623 INSN_CODE (insn) = -1;
fc92fa61 3624 LOG_LINKS (insn) = NULL;
3625 REG_NOTES (insn) = NULL;
13751393 3626 INSN_LOCATOR (insn) = 0;
ab87d1bc 3627 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3628
fe7f701d 3629#ifdef ENABLE_RTL_CHECKING
3630 if (insn
9204e736 3631 && INSN_P (insn)
fe7f701d 3632 && (returnjump_p (insn)
3633 || (GET_CODE (insn) == SET
3634 && SET_DEST (insn) == pc_rtx)))
3635 {
3636 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3637 debug_rtx (insn);
3638 }
3639#endif
d823ba47 3640
15bbde2b 3641 return insn;
3642}
3643
31d3e01c 3644/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
15bbde2b 3645
3646static rtx
6a84e367 3647make_jump_insn_raw (pattern)
15bbde2b 3648 rtx pattern;
15bbde2b 3649{
19cb6b50 3650 rtx insn;
15bbde2b 3651
6a84e367 3652 insn = rtx_alloc (JUMP_INSN);
fc92fa61 3653 INSN_UID (insn) = cur_insn_uid++;
15bbde2b 3654
3655 PATTERN (insn) = pattern;
3656 INSN_CODE (insn) = -1;
fc92fa61 3657 LOG_LINKS (insn) = NULL;
3658 REG_NOTES (insn) = NULL;
3659 JUMP_LABEL (insn) = NULL;
13751393 3660 INSN_LOCATOR (insn) = 0;
ab87d1bc 3661 BLOCK_FOR_INSN (insn) = NULL;
15bbde2b 3662
3663 return insn;
3664}
6e911104 3665
31d3e01c 3666/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
6e911104 3667
3668static rtx
3669make_call_insn_raw (pattern)
3670 rtx pattern;
3671{
19cb6b50 3672 rtx insn;
6e911104 3673
3674 insn = rtx_alloc (CALL_INSN);
3675 INSN_UID (insn) = cur_insn_uid++;
3676
3677 PATTERN (insn) = pattern;
3678 INSN_CODE (insn) = -1;
3679 LOG_LINKS (insn) = NULL;
3680 REG_NOTES (insn) = NULL;
3681 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
13751393 3682 INSN_LOCATOR (insn) = 0;
ab87d1bc 3683 BLOCK_FOR_INSN (insn) = NULL;
6e911104 3684
3685 return insn;
3686}
15bbde2b 3687\f
3688/* Add INSN to the end of the doubly-linked list.
3689 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3690
3691void
3692add_insn (insn)
19cb6b50 3693 rtx insn;
15bbde2b 3694{
3695 PREV_INSN (insn) = last_insn;
3696 NEXT_INSN (insn) = 0;
3697
3698 if (NULL != last_insn)
3699 NEXT_INSN (last_insn) = insn;
3700
3701 if (NULL == first_insn)
3702 first_insn = insn;
3703
3704 last_insn = insn;
3705}
3706
312de84d 3707/* Add INSN into the doubly-linked list after insn AFTER. This and
3708 the next should be the only functions called to insert an insn once
f65c10c0 3709 delay slots have been filled since only they know how to update a
312de84d 3710 SEQUENCE. */
15bbde2b 3711
void
add_insn_after (insn, after)
     rtx insn, after;
{
  rtx next = NEXT_INSN (after);
  basic_block bb;

  /* Inserting after a deleted insn is almost certainly a bug.  */
  if (optimize && INSN_DELETED_P (after))
    abort ();

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      /* A SEQUENCE's first member mirrors the SEQUENCE insn's own PREV
	 pointer; keep them in sync.  */
      if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      /* AFTER ended neither the main chain nor... scan all pending
	 sequences too; it must end one of them.  */
      struct sequence_stack *stack = seq_stack;
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  /* Maintain basic-block info: INSN joins AFTER's block, which becomes
     dirty, and may become the new block end.  */
  if (GET_CODE (after) != BARRIER
      && GET_CODE (insn) != BARRIER
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	bb->flags |= BB_DIRTY;
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (bb->end == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && GET_CODE (insn) != BARRIER
	  && (GET_CODE (insn) != NOTE
	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
	bb->end = insn;
    }

  NEXT_INSN (after) = insn;
  if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
3772
312de84d 3773/* Add INSN into the doubly-linked list before insn BEFORE. This and
3774 the previous should be the only functions called to insert an insn once
f65c10c0 3775 delay slots have been filled since only they know how to update a
312de84d 3776 SEQUENCE. */
3777
void
add_insn_before (insn, before)
     rtx insn, before;
{
  rtx prev = PREV_INSN (before);
  basic_block bb;

  /* Inserting before a deleted insn is almost certainly a bug.  */
  if (optimize && INSN_DELETED_P (before))
    abort ();

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      /* A SEQUENCE's last member mirrors the SEQUENCE insn's own NEXT
	 pointer; keep them in sync.  */
      if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      /* BEFORE started neither the main chain nor... scan all pending
	 sequences too; it must start one of them.  */
      struct sequence_stack *stack = seq_stack;
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  /* Maintain basic-block info: INSN joins BEFORE's block, which
     becomes dirty.  */
  if (GET_CODE (before) != BARRIER
      && GET_CODE (insn) != BARRIER
      && (bb = BLOCK_FOR_INSN (before)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	bb->flags |= BB_DIRTY;
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (bb->head == insn
	  /* Avoid clobbering of structure when creating new BB.  */
	  && GET_CODE (insn) != BARRIER
	  && (GET_CODE (insn) != NOTE
	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
	abort ();
    }

  PREV_INSN (before) = insn;
  if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
3838
7ddcf2bf 3839/* Remove an insn from its doubly-linked list. This function knows how
3840 to handle sequences. */
void
remove_insn (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      NEXT_INSN (prev) = next;
      /* Keep a preceding SEQUENCE's internal NEXT pointer in sync.  */
      if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      /* INSN started neither the main chain nor... scan all pending
	 sequences too; it must start one of them.  */
      struct sequence_stack *stack = seq_stack;
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      /* Keep a following SEQUENCE's internal PREV pointer in sync.  */
      if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      /* Likewise, INSN must end one of the pending sequences.  */
      struct sequence_stack *stack = seq_stack;
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      if (stack == 0)
	abort ();
    }

  /* Maintain basic-block info: adjust the block's head/end pointers if
     INSN was a boundary, and mark the block dirty.  */
  if (GET_CODE (insn) != BARRIER
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
	bb->flags |= BB_DIRTY;
      if (bb->head == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  if (GET_CODE (insn) == NOTE)
	    abort ();
	  bb->head = next;
	}
      if (bb->end == insn)
	bb->end = prev;
    }
}
3914
d5f9786f 3915/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3916
3917void
3918add_function_usage_to (call_insn, call_fusage)
3919 rtx call_insn, call_fusage;
3920{
3921 if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
3922 abort ();
3923
3924 /* Put the register usage information on the CALL. If there is already
3925 some usage information, put ours at the end. */
3926 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3927 {
3928 rtx link;
3929
3930 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3931 link = XEXP (link, 1))
3932 ;
3933
3934 XEXP (link, 1) = call_fusage;
3935 }
3936 else
3937 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3938}
3939
15bbde2b 3940/* Delete all insns made since FROM.
3941 FROM becomes the new last instruction. */
3942
3943void
3944delete_insns_since (from)
3945 rtx from;
3946{
3947 if (from == 0)
3948 first_insn = 0;
3949 else
3950 NEXT_INSN (from) = 0;
3951 last_insn = from;
3952}
3953
34e2ddcd 3954/* This function is deprecated, please use sequences instead.
3955
3956 Move a consecutive bunch of insns to a different place in the chain.
15bbde2b 3957 The insns to be moved are those between FROM and TO.
3958 They are moved to a new position after the insn AFTER.
3959 AFTER must not be FROM or TO or any insn in between.
3960
3961 This function does not know about SEQUENCEs and hence should not be
3962 called after delay-slot filling has been done. */
3963
3964void
9dda7915 3965reorder_insns_nobb (from, to, after)
15bbde2b 3966 rtx from, to, after;
3967{
3968 /* Splice this bunch out of where it is now. */
3969 if (PREV_INSN (from))
3970 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3971 if (NEXT_INSN (to))
3972 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3973 if (last_insn == to)
3974 last_insn = PREV_INSN (from);
3975 if (first_insn == from)
3976 first_insn = NEXT_INSN (to);
3977
3978 /* Make the new neighbors point to it and it to them. */
3979 if (NEXT_INSN (after))
3980 PREV_INSN (NEXT_INSN (after)) = to;
3981
3982 NEXT_INSN (to) = NEXT_INSN (after);
3983 PREV_INSN (from) = after;
3984 NEXT_INSN (after) = from;
3985 if (after == last_insn)
3986 last_insn = to;
3987}
3988
9dda7915 3989/* Same as function above, but take care to update BB boundaries. */
3990void
3991reorder_insns (from, to, after)
3992 rtx from, to, after;
3993{
3994 rtx prev = PREV_INSN (from);
3995 basic_block bb, bb2;
3996
3997 reorder_insns_nobb (from, to, after);
3998
ab87d1bc 3999 if (GET_CODE (after) != BARRIER
9dda7915 4000 && (bb = BLOCK_FOR_INSN (after)))
4001 {
4002 rtx x;
308f9b79 4003 bb->flags |= BB_DIRTY;
d4c5e26d 4004
ab87d1bc 4005 if (GET_CODE (from) != BARRIER
9dda7915 4006 && (bb2 = BLOCK_FOR_INSN (from)))
4007 {
4008 if (bb2->end == to)
4009 bb2->end = prev;
308f9b79 4010 bb2->flags |= BB_DIRTY;
9dda7915 4011 }
4012
4013 if (bb->end == after)
4014 bb->end = to;
4015
4016 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4017 set_block_for_insn (x, bb);
4018 }
4019}
4020
15bbde2b 4021/* Return the line note insn preceding INSN. */
4022
4023static rtx
4024find_line_note (insn)
4025 rtx insn;
4026{
4027 if (no_line_numbers)
4028 return 0;
4029
4030 for (; insn; insn = PREV_INSN (insn))
4031 if (GET_CODE (insn) == NOTE
ff385626 4032 && NOTE_LINE_NUMBER (insn) >= 0)
15bbde2b 4033 break;
4034
4035 return insn;
4036}
4037
4038/* Like reorder_insns, but inserts line notes to preserve the line numbers
4039 of the moved insns when debugging. This may insert a note between AFTER
4040 and FROM, and another one after TO. */
4041
4042void
4043reorder_insns_with_line_notes (from, to, after)
4044 rtx from, to, after;
4045{
4046 rtx from_line = find_line_note (from);
4047 rtx after_line = find_line_note (after);
4048
4049 reorder_insns (from, to, after);
4050
4051 if (from_line == after_line)
4052 return;
4053
4054 if (from_line)
4055 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4056 NOTE_LINE_NUMBER (from_line),
4057 after);
4058 if (after_line)
4059 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4060 NOTE_LINE_NUMBER (after_line),
4061 to);
4062}
90b89d2c 4063
0a78547b 4064/* Remove unnecessary notes from the instruction stream. */
90b89d2c 4065
4066void
0a78547b 4067remove_unnecessary_notes ()
90b89d2c 4068{
92cfc4a8 4069 rtx block_stack = NULL_RTX;
4070 rtx eh_stack = NULL_RTX;
90b89d2c 4071 rtx insn;
4072 rtx next;
92cfc4a8 4073 rtx tmp;
90b89d2c 4074
f1ab82be 4075 /* We must not remove the first instruction in the function because
4076 the compiler depends on the first instruction being a note. */
90b89d2c 4077 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
4078 {
4079 /* Remember what's next. */
4080 next = NEXT_INSN (insn);
4081
4082 /* We're only interested in notes. */
4083 if (GET_CODE (insn) != NOTE)
4084 continue;
4085
92cfc4a8 4086 switch (NOTE_LINE_NUMBER (insn))
5846cb0f 4087 {
92cfc4a8 4088 case NOTE_INSN_DELETED:
cab14df7 4089 case NOTE_INSN_LOOP_END_TOP_COND:
92cfc4a8 4090 remove_insn (insn);
4091 break;
4092
4093 case NOTE_INSN_EH_REGION_BEG:
4094 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4095 break;
4096
4097 case NOTE_INSN_EH_REGION_END:
4098 /* Too many end notes. */
4099 if (eh_stack == NULL_RTX)
4100 abort ();
4101 /* Mismatched nesting. */
4102 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4103 abort ();
4104 tmp = eh_stack;
4105 eh_stack = XEXP (eh_stack, 1);
4106 free_INSN_LIST_node (tmp);
4107 break;
4108
4109 case NOTE_INSN_BLOCK_BEG:
4110 /* By now, all notes indicating lexical blocks should have
4111 NOTE_BLOCK filled in. */
4112 if (NOTE_BLOCK (insn) == NULL_TREE)
4113 abort ();
4114 block_stack = alloc_INSN_LIST (insn, block_stack);
4115 break;
4116
4117 case NOTE_INSN_BLOCK_END:
4118 /* Too many end notes. */
4119 if (block_stack == NULL_RTX)
4120 abort ();
4121 /* Mismatched nesting. */
4122 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4123 abort ();
4124 tmp = block_stack;
4125 block_stack = XEXP (block_stack, 1);
4126 free_INSN_LIST_node (tmp);
4127
5846cb0f 4128 /* Scan back to see if there are any non-note instructions
4129 between INSN and the beginning of this block. If not,
4130 then there is no PC range in the generated code that will
4131 actually be in this block, so there's no point in
4132 remembering the existence of the block. */
d4c5e26d 4133 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
5846cb0f 4134 {
4135 /* This block contains a real instruction. Note that we
4136 don't include labels; if the only thing in the block
4137 is a label, then there are still no PC values that
4138 lie within the block. */
92cfc4a8 4139 if (INSN_P (tmp))
5846cb0f 4140 break;
4141
4142 /* We're only interested in NOTEs. */
92cfc4a8 4143 if (GET_CODE (tmp) != NOTE)
5846cb0f 4144 continue;
4145
92cfc4a8 4146 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
5846cb0f 4147 {
b29760a8 4148 /* We just verified that this BLOCK matches us with
4149 the block_stack check above. Never delete the
4150 BLOCK for the outermost scope of the function; we
4151 can refer to names from that scope even if the
4152 block notes are messed up. */
4153 if (! is_body_block (NOTE_BLOCK (insn))
4154 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
e33dbbdf 4155 {
92cfc4a8 4156 remove_insn (tmp);
e33dbbdf 4157 remove_insn (insn);
4158 }
5846cb0f 4159 break;
4160 }
92cfc4a8 4161 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
5846cb0f 4162 /* There's a nested block. We need to leave the
4163 current block in place since otherwise the debugger
4164 wouldn't be able to show symbols from our block in
4165 the nested block. */
4166 break;
4167 }
4168 }
90b89d2c 4169 }
92cfc4a8 4170
4171 /* Too many begin notes. */
4172 if (block_stack || eh_stack)
4173 abort ();
90b89d2c 4174}
4175
15bbde2b 4176\f
31d3e01c 4177/* Emit insn(s) of given code and pattern
4178 at a specified place within the doubly-linked list.
15bbde2b 4179
31d3e01c 4180 All of the emit_foo global entry points accept an object
4181 X which is either an insn list or a PATTERN of a single
4182 instruction.
15bbde2b 4183
31d3e01c 4184 There are thus a few canonical ways to generate code and
4185 emit it at a specific place in the instruction stream. For
4186 example, consider the instruction named SPOT and the fact that
4187 we would like to emit some instructions before SPOT. We might
4188 do it like this:
15bbde2b 4189
31d3e01c 4190 start_sequence ();
4191 ... emit the new instructions ...
4192 insns_head = get_insns ();
4193 end_sequence ();
15bbde2b 4194
31d3e01c 4195 emit_insn_before (insns_head, SPOT);
15bbde2b 4196
31d3e01c 4197 It used to be common to generate SEQUENCE rtl instead, but that
4198 is a relic of the past which no longer occurs. The reason is that
4199 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4200 generated would almost certainly die right after it was created. */
15bbde2b 4201
31d3e01c 4202/* Make X be output before the instruction BEFORE. */
15bbde2b 4203
4204rtx
31d3e01c 4205emit_insn_before (x, before)
4206 rtx x, before;
15bbde2b 4207{
31d3e01c 4208 rtx last = before;
19cb6b50 4209 rtx insn;
15bbde2b 4210
31d3e01c 4211#ifdef ENABLE_RTL_CHECKING
4212 if (before == NULL_RTX)
4213 abort ();
4214#endif
4215
4216 if (x == NULL_RTX)
4217 return last;
4218
4219 switch (GET_CODE (x))
15bbde2b 4220 {
31d3e01c 4221 case INSN:
4222 case JUMP_INSN:
4223 case CALL_INSN:
4224 case CODE_LABEL:
4225 case BARRIER:
4226 case NOTE:
4227 insn = x;
4228 while (insn)
4229 {
4230 rtx next = NEXT_INSN (insn);
4231 add_insn_before (insn, before);
4232 last = insn;
4233 insn = next;
4234 }
4235 break;
4236
4237#ifdef ENABLE_RTL_CHECKING
4238 case SEQUENCE:
4239 abort ();
4240 break;
4241#endif
4242
4243 default:
4244 last = make_insn_raw (x);
4245 add_insn_before (last, before);
4246 break;
15bbde2b 4247 }
4248
31d3e01c 4249 return last;
15bbde2b 4250}
4251
31d3e01c 4252/* Make an instruction with body X and code JUMP_INSN
15bbde2b 4253 and output it before the instruction BEFORE. */
4254
4255rtx
31d3e01c 4256emit_jump_insn_before (x, before)
4257 rtx x, before;
15bbde2b 4258{
d90b3d04 4259 rtx insn, last = NULL_RTX;
6e911104 4260
31d3e01c 4261#ifdef ENABLE_RTL_CHECKING
4262 if (before == NULL_RTX)
4263 abort ();
4264#endif
4265
4266 switch (GET_CODE (x))
6e911104 4267 {
31d3e01c 4268 case INSN:
4269 case JUMP_INSN:
4270 case CALL_INSN:
4271 case CODE_LABEL:
4272 case BARRIER:
4273 case NOTE:
4274 insn = x;
4275 while (insn)
4276 {
4277 rtx next = NEXT_INSN (insn);
4278 add_insn_before (insn, before);
4279 last = insn;
4280 insn = next;
4281 }
4282 break;
4283
4284#ifdef ENABLE_RTL_CHECKING
4285 case SEQUENCE:
4286 abort ();
4287 break;
4288#endif
4289
4290 default:
4291 last = make_jump_insn_raw (x);
4292 add_insn_before (last, before);
4293 break;
6e911104 4294 }
4295
31d3e01c 4296 return last;
15bbde2b 4297}
4298
31d3e01c 4299/* Make an instruction with body X and code CALL_INSN
cd0fe062 4300 and output it before the instruction BEFORE. */
4301
4302rtx
31d3e01c 4303emit_call_insn_before (x, before)
4304 rtx x, before;
cd0fe062 4305{
d90b3d04 4306 rtx last = NULL_RTX, insn;
cd0fe062 4307
31d3e01c 4308#ifdef ENABLE_RTL_CHECKING
4309 if (before == NULL_RTX)
4310 abort ();
4311#endif
4312
4313 switch (GET_CODE (x))
cd0fe062 4314 {
31d3e01c 4315 case INSN:
4316 case JUMP_INSN:
4317 case CALL_INSN:
4318 case CODE_LABEL:
4319 case BARRIER:
4320 case NOTE:
4321 insn = x;
4322 while (insn)
4323 {
4324 rtx next = NEXT_INSN (insn);
4325 add_insn_before (insn, before);
4326 last = insn;
4327 insn = next;
4328 }
4329 break;
4330
4331#ifdef ENABLE_RTL_CHECKING
4332 case SEQUENCE:
4333 abort ();
4334 break;
4335#endif
4336
4337 default:
4338 last = make_call_insn_raw (x);
4339 add_insn_before (last, before);
4340 break;
cd0fe062 4341 }
4342
31d3e01c 4343 return last;
cd0fe062 4344}
4345
15bbde2b 4346/* Make an insn of code BARRIER
71caadc0 4347 and output it before the insn BEFORE. */
15bbde2b 4348
4349rtx
4350emit_barrier_before (before)
19cb6b50 4351 rtx before;
15bbde2b 4352{
19cb6b50 4353 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4354
4355 INSN_UID (insn) = cur_insn_uid++;
4356
312de84d 4357 add_insn_before (insn, before);
15bbde2b 4358 return insn;
4359}
4360
71caadc0 4361/* Emit the label LABEL before the insn BEFORE. */
4362
4363rtx
4364emit_label_before (label, before)
4365 rtx label, before;
4366{
4367 /* This can be called twice for the same label as a result of the
4368 confusion that follows a syntax error! So make it harmless. */
4369 if (INSN_UID (label) == 0)
4370 {
4371 INSN_UID (label) = cur_insn_uid++;
4372 add_insn_before (label, before);
4373 }
4374
4375 return label;
4376}
4377
15bbde2b 4378/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4379
4380rtx
4381emit_note_before (subtype, before)
4382 int subtype;
4383 rtx before;
4384{
19cb6b50 4385 rtx note = rtx_alloc (NOTE);
15bbde2b 4386 INSN_UID (note) = cur_insn_uid++;
4387 NOTE_SOURCE_FILE (note) = 0;
4388 NOTE_LINE_NUMBER (note) = subtype;
ab87d1bc 4389 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4390
312de84d 4391 add_insn_before (note, before);
15bbde2b 4392 return note;
4393}
4394\f
31d3e01c 4395/* Helper for emit_insn_after, handles lists of instructions
4396 efficiently. */
15bbde2b 4397
31d3e01c 4398static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4399
4400static rtx
4401emit_insn_after_1 (first, after)
4402 rtx first, after;
15bbde2b 4403{
31d3e01c 4404 rtx last;
4405 rtx after_after;
4406 basic_block bb;
15bbde2b 4407
31d3e01c 4408 if (GET_CODE (after) != BARRIER
4409 && (bb = BLOCK_FOR_INSN (after)))
15bbde2b 4410 {
31d3e01c 4411 bb->flags |= BB_DIRTY;
4412 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4413 if (GET_CODE (last) != BARRIER)
4414 set_block_for_insn (last, bb);
4415 if (GET_CODE (last) != BARRIER)
4416 set_block_for_insn (last, bb);
4417 if (bb->end == after)
4418 bb->end = last;
15bbde2b 4419 }
4420 else
31d3e01c 4421 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4422 continue;
4423
4424 after_after = NEXT_INSN (after);
4425
4426 NEXT_INSN (after) = first;
4427 PREV_INSN (first) = after;
4428 NEXT_INSN (last) = after_after;
4429 if (after_after)
4430 PREV_INSN (after_after) = last;
4431
4432 if (after == last_insn)
4433 last_insn = last;
4434 return last;
4435}
4436
4437/* Make X be output after the insn AFTER. */
4438
4439rtx
4440emit_insn_after (x, after)
4441 rtx x, after;
4442{
4443 rtx last = after;
4444
4445#ifdef ENABLE_RTL_CHECKING
4446 if (after == NULL_RTX)
4447 abort ();
4448#endif
4449
4450 if (x == NULL_RTX)
4451 return last;
4452
4453 switch (GET_CODE (x))
15bbde2b 4454 {
31d3e01c 4455 case INSN:
4456 case JUMP_INSN:
4457 case CALL_INSN:
4458 case CODE_LABEL:
4459 case BARRIER:
4460 case NOTE:
4461 last = emit_insn_after_1 (x, after);
4462 break;
4463
4464#ifdef ENABLE_RTL_CHECKING
4465 case SEQUENCE:
4466 abort ();
4467 break;
4468#endif
4469
4470 default:
4471 last = make_insn_raw (x);
4472 add_insn_after (last, after);
4473 break;
15bbde2b 4474 }
4475
31d3e01c 4476 return last;
15bbde2b 4477}
4478
1bea98fb 4479/* Similar to emit_insn_after, except that line notes are to be inserted so
4480 as to act as if this insn were at FROM. */
4481
4482void
31d3e01c 4483emit_insn_after_with_line_notes (x, after, from)
4484 rtx x, after, from;
1bea98fb 4485{
4486 rtx from_line = find_line_note (from);
4487 rtx after_line = find_line_note (after);
31d3e01c 4488 rtx insn = emit_insn_after (x, after);
1bea98fb 4489
4490 if (from_line)
4491 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4492 NOTE_LINE_NUMBER (from_line),
4493 after);
4494
4495 if (after_line)
4496 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4497 NOTE_LINE_NUMBER (after_line),
4498 insn);
4499}
4500
31d3e01c 4501/* Make an insn of code JUMP_INSN with body X
15bbde2b 4502 and output it after the insn AFTER. */
4503
4504rtx
31d3e01c 4505emit_jump_insn_after (x, after)
4506 rtx x, after;
15bbde2b 4507{
31d3e01c 4508 rtx last;
15bbde2b 4509
31d3e01c 4510#ifdef ENABLE_RTL_CHECKING
4511 if (after == NULL_RTX)
4512 abort ();
4513#endif
4514
4515 switch (GET_CODE (x))
15bbde2b 4516 {
31d3e01c 4517 case INSN:
4518 case JUMP_INSN:
4519 case CALL_INSN:
4520 case CODE_LABEL:
4521 case BARRIER:
4522 case NOTE:
4523 last = emit_insn_after_1 (x, after);
4524 break;
4525
4526#ifdef ENABLE_RTL_CHECKING
4527 case SEQUENCE:
4528 abort ();
4529 break;
4530#endif
4531
4532 default:
4533 last = make_jump_insn_raw (x);
4534 add_insn_after (last, after);
4535 break;
15bbde2b 4536 }
4537
31d3e01c 4538 return last;
4539}
4540
4541/* Make an instruction with body X and code CALL_INSN
4542 and output it after the instruction AFTER. */
4543
4544rtx
4545emit_call_insn_after (x, after)
4546 rtx x, after;
4547{
4548 rtx last;
4549
4550#ifdef ENABLE_RTL_CHECKING
4551 if (after == NULL_RTX)
4552 abort ();
4553#endif
4554
4555 switch (GET_CODE (x))
4556 {
4557 case INSN:
4558 case JUMP_INSN:
4559 case CALL_INSN:
4560 case CODE_LABEL:
4561 case BARRIER:
4562 case NOTE:
4563 last = emit_insn_after_1 (x, after);
4564 break;
4565
4566#ifdef ENABLE_RTL_CHECKING
4567 case SEQUENCE:
4568 abort ();
4569 break;
4570#endif
4571
4572 default:
4573 last = make_call_insn_raw (x);
4574 add_insn_after (last, after);
4575 break;
4576 }
4577
4578 return last;
15bbde2b 4579}
4580
4581/* Make an insn of code BARRIER
4582 and output it after the insn AFTER. */
4583
4584rtx
4585emit_barrier_after (after)
19cb6b50 4586 rtx after;
15bbde2b 4587{
19cb6b50 4588 rtx insn = rtx_alloc (BARRIER);
15bbde2b 4589
4590 INSN_UID (insn) = cur_insn_uid++;
4591
4592 add_insn_after (insn, after);
4593 return insn;
4594}
4595
4596/* Emit the label LABEL after the insn AFTER. */
4597
4598rtx
4599emit_label_after (label, after)
4600 rtx label, after;
4601{
4602 /* This can be called twice for the same label
4603 as a result of the confusion that follows a syntax error!
4604 So make it harmless. */
4605 if (INSN_UID (label) == 0)
4606 {
4607 INSN_UID (label) = cur_insn_uid++;
4608 add_insn_after (label, after);
4609 }
4610
4611 return label;
4612}
4613
4614/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4615
4616rtx
4617emit_note_after (subtype, after)
4618 int subtype;
4619 rtx after;
4620{
19cb6b50 4621 rtx note = rtx_alloc (NOTE);
15bbde2b 4622 INSN_UID (note) = cur_insn_uid++;
4623 NOTE_SOURCE_FILE (note) = 0;
4624 NOTE_LINE_NUMBER (note) = subtype;
ab87d1bc 4625 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4626 add_insn_after (note, after);
4627 return note;
4628}
4629
/* Emit a line note for FILE and LINE after the insn AFTER.
   Returns the new note, or 0 when line numbers are disabled.  */

rtx
emit_line_note_after (file, line, after)
     const char *file;
     int line;
     rtx after;
{
  rtx note;

  /* Negative "line numbers" encode NOTE_INSN_* subtypes and must not
     reach this function.  */
  if (line < 0)
    abort ();
  if (no_line_numbers)
    {
      /* Still burn a UID so insn numbering is identical whether or
	 not line notes are emitted -- presumably to keep generated
	 code independent of -g; confirm against callers.  */
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_SOURCE_FILE (note) = file;
  NOTE_LINE_NUMBER (note) = line;
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}
4656\f
13751393 4657/* Like emit_insn_after, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4658rtx
13751393 4659emit_insn_after_setloc (pattern, after, loc)
d321a68b 4660 rtx pattern, after;
13751393 4661 int loc;
d321a68b 4662{
4663 rtx last = emit_insn_after (pattern, after);
d321a68b 4664
31d3e01c 4665 after = NEXT_INSN (after);
4666 while (1)
4667 {
59d19cb8 4668 if (active_insn_p (after))
13751393 4669 INSN_LOCATOR (after) = loc;
31d3e01c 4670 if (after == last)
4671 break;
4672 after = NEXT_INSN (after);
4673 }
d321a68b 4674 return last;
4675}
4676
13751393 4677/* Like emit_jump_insn_after, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4678rtx
13751393 4679emit_jump_insn_after_setloc (pattern, after, loc)
d321a68b 4680 rtx pattern, after;
13751393 4681 int loc;
d321a68b 4682{
4683 rtx last = emit_jump_insn_after (pattern, after);
31d3e01c 4684
4685 after = NEXT_INSN (after);
4686 while (1)
4687 {
59d19cb8 4688 if (active_insn_p (after))
13751393 4689 INSN_LOCATOR (after) = loc;
31d3e01c 4690 if (after == last)
4691 break;
4692 after = NEXT_INSN (after);
4693 }
d321a68b 4694 return last;
4695}
4696
13751393 4697/* Like emit_call_insn_after, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4698rtx
13751393 4699emit_call_insn_after_setloc (pattern, after, loc)
d321a68b 4700 rtx pattern, after;
13751393 4701 int loc;
d321a68b 4702{
4703 rtx last = emit_call_insn_after (pattern, after);
31d3e01c 4704
4705 after = NEXT_INSN (after);
4706 while (1)
4707 {
59d19cb8 4708 if (active_insn_p (after))
13751393 4709 INSN_LOCATOR (after) = loc;
31d3e01c 4710 if (after == last)
4711 break;
4712 after = NEXT_INSN (after);
4713 }
d321a68b 4714 return last;
4715}
4716
13751393 4717/* Like emit_insn_before, but set INSN_LOCATOR according to SCOPE. */
d321a68b 4718rtx
13751393 4719emit_insn_before_setloc (pattern, before, loc)
d321a68b 4720 rtx pattern, before;
13751393 4721 int loc;
d321a68b 4722{
4723 rtx first = PREV_INSN (before);
4724 rtx last = emit_insn_before (pattern, before);
4725
31d3e01c 4726 first = NEXT_INSN (first);
4727 while (1)
4728 {
59d19cb8 4729 if (active_insn_p (first))
13751393 4730 INSN_LOCATOR (first) = loc;
31d3e01c 4731 if (first == last)
4732 break;
4733 first = NEXT_INSN (first);
4734 }
d321a68b 4735 return last;
4736}
4737\f
31d3e01c 4738/* Take X and emit it at the end of the doubly-linked
4739 INSN list.
15bbde2b 4740
4741 Returns the last insn emitted. */
4742
4743rtx
31d3e01c 4744emit_insn (x)
4745 rtx x;
15bbde2b 4746{
31d3e01c 4747 rtx last = last_insn;
4748 rtx insn;
15bbde2b 4749
31d3e01c 4750 if (x == NULL_RTX)
4751 return last;
15bbde2b 4752
31d3e01c 4753 switch (GET_CODE (x))
4754 {
4755 case INSN:
4756 case JUMP_INSN:
4757 case CALL_INSN:
4758 case CODE_LABEL:
4759 case BARRIER:
4760 case NOTE:
4761 insn = x;
4762 while (insn)
15bbde2b 4763 {
31d3e01c 4764 rtx next = NEXT_INSN (insn);
15bbde2b 4765 add_insn (insn);
31d3e01c 4766 last = insn;
4767 insn = next;
15bbde2b 4768 }
31d3e01c 4769 break;
15bbde2b 4770
31d3e01c 4771#ifdef ENABLE_RTL_CHECKING
4772 case SEQUENCE:
4773 abort ();
4774 break;
4775#endif
15bbde2b 4776
31d3e01c 4777 default:
4778 last = make_insn_raw (x);
4779 add_insn (last);
4780 break;
15bbde2b 4781 }
4782
4783 return last;
4784}
4785
31d3e01c 4786/* Make an insn of code JUMP_INSN with pattern X
4787 and add it to the end of the doubly-linked list. */
15bbde2b 4788
4789rtx
31d3e01c 4790emit_jump_insn (x)
4791 rtx x;
15bbde2b 4792{
d90b3d04 4793 rtx last = NULL_RTX, insn;
15bbde2b 4794
31d3e01c 4795 switch (GET_CODE (x))
15bbde2b 4796 {
31d3e01c 4797 case INSN:
4798 case JUMP_INSN:
4799 case CALL_INSN:
4800 case CODE_LABEL:
4801 case BARRIER:
4802 case NOTE:
4803 insn = x;
4804 while (insn)
4805 {
4806 rtx next = NEXT_INSN (insn);
4807 add_insn (insn);
4808 last = insn;
4809 insn = next;
4810 }
4811 break;
b36b07d8 4812
31d3e01c 4813#ifdef ENABLE_RTL_CHECKING
4814 case SEQUENCE:
4815 abort ();
4816 break;
4817#endif
b36b07d8 4818
31d3e01c 4819 default:
4820 last = make_jump_insn_raw (x);
4821 add_insn (last);
4822 break;
9dda7915 4823 }
b36b07d8 4824
4825 return last;
4826}
4827
31d3e01c 4828/* Make an insn of code CALL_INSN with pattern X
15bbde2b 4829 and add it to the end of the doubly-linked list. */
4830
4831rtx
31d3e01c 4832emit_call_insn (x)
4833 rtx x;
15bbde2b 4834{
31d3e01c 4835 rtx insn;
4836
4837 switch (GET_CODE (x))
15bbde2b 4838 {
31d3e01c 4839 case INSN:
4840 case JUMP_INSN:
4841 case CALL_INSN:
4842 case CODE_LABEL:
4843 case BARRIER:
4844 case NOTE:
4845 insn = emit_insn (x);
4846 break;
15bbde2b 4847
31d3e01c 4848#ifdef ENABLE_RTL_CHECKING
4849 case SEQUENCE:
4850 abort ();
4851 break;
4852#endif
15bbde2b 4853
31d3e01c 4854 default:
4855 insn = make_call_insn_raw (x);
15bbde2b 4856 add_insn (insn);
31d3e01c 4857 break;
15bbde2b 4858 }
31d3e01c 4859
4860 return insn;
15bbde2b 4861}
4862
4863/* Add the label LABEL to the end of the doubly-linked list. */
4864
4865rtx
4866emit_label (label)
4867 rtx label;
4868{
4869 /* This can be called twice for the same label
4870 as a result of the confusion that follows a syntax error!
4871 So make it harmless. */
4872 if (INSN_UID (label) == 0)
4873 {
4874 INSN_UID (label) = cur_insn_uid++;
4875 add_insn (label);
4876 }
4877 return label;
4878}
4879
4880/* Make an insn of code BARRIER
4881 and add it to the end of the doubly-linked list. */
4882
4883rtx
4884emit_barrier ()
4885{
19cb6b50 4886 rtx barrier = rtx_alloc (BARRIER);
15bbde2b 4887 INSN_UID (barrier) = cur_insn_uid++;
4888 add_insn (barrier);
4889 return barrier;
4890}
4891
/* Make an insn of code NOTE
   with data-fields specified by FILE and LINE
   and add it to the end of the doubly-linked list,
   but only if line-numbers are desired for debugging info.
   Returns the note, or NULL_RTX when the note is elided.  */

rtx
emit_line_note (file, line)
     const char *file;
     int line;
{
  /* Negative "line numbers" encode NOTE_INSN_* subtypes and must not
     reach this function.  */
  if (line < 0)
    abort ();

  set_file_and_line_for_stmt (file, line);

  /* Elide a note identical to the previously emitted location.  */
  if (file && last_location.file && !strcmp (file, last_location.file)
      && line == last_location.line)
    return NULL_RTX;
  last_location.file = file;
  last_location.line = line;

  if (no_line_numbers)
    {
      /* Still burn a UID so insn numbering is identical whether or
	 not line notes are emitted.  */
      cur_insn_uid++;
      return NULL_RTX;
    }

  return emit_note (file, line);
}
4921
4922/* Make an insn of code NOTE
4923 with data-fields specified by FILE and LINE
4924 and add it to the end of the doubly-linked list.
4925 If it is a line-number NOTE, omit it if it matches the previous one. */
4926
4927rtx
4928emit_note (file, line)
9a356c3c 4929 const char *file;
15bbde2b 4930 int line;
4931{
19cb6b50 4932 rtx note;
15bbde2b 4933
15bbde2b 4934 note = rtx_alloc (NOTE);
4935 INSN_UID (note) = cur_insn_uid++;
4936 NOTE_SOURCE_FILE (note) = file;
4937 NOTE_LINE_NUMBER (note) = line;
ab87d1bc 4938 BLOCK_FOR_INSN (note) = NULL;
15bbde2b 4939 add_insn (note);
4940 return note;
4941}
4942
8d54063f 4943/* Emit a NOTE, and don't omit it even if LINE is the previous note. */
15bbde2b 4944
4945rtx
4946emit_line_note_force (file, line)
9a356c3c 4947 const char *file;
15bbde2b 4948 int line;
4949{
0a3b3d88 4950 last_location.line = -1;
15bbde2b 4951 return emit_line_note (file, line);
4952}
4953
/* Cause next statement to emit a line note even if the line number
   has not changed.  This is used at the beginning of a function.  */

void
force_next_line_note ()
{
  /* Invalidate the cached location; the next emit_line_note call can
     then never match it.  */
  last_location.line = -1;
}
f1934a33 4962
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first and reuse its
   slot.  Returns the note, or NULL_RTX when the note is refused.  */

rtx
set_unique_reg_note (insn, kind, datum)
     rtx insn;
     enum reg_note kind;
     rtx datum;
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  /* A pre-existing note on such an insn indicates a bug.  */
	  if (note)
	    abort ();
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;
      break;

    default:
      break;
    }

  /* Reuse the existing note when there is one.  */
  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
15bbde2b 5008\f
5009/* Return an indication of which type of insn should have X as a body.
5010 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5011
5012enum rtx_code
5013classify_insn (x)
5014 rtx x;
5015{
5016 if (GET_CODE (x) == CODE_LABEL)
5017 return CODE_LABEL;
5018 if (GET_CODE (x) == CALL)
5019 return CALL_INSN;
5020 if (GET_CODE (x) == RETURN)
5021 return JUMP_INSN;
5022 if (GET_CODE (x) == SET)
5023 {
5024 if (SET_DEST (x) == pc_rtx)
5025 return JUMP_INSN;
5026 else if (GET_CODE (SET_SRC (x)) == CALL)
5027 return CALL_INSN;
5028 else
5029 return INSN;
5030 }
5031 if (GET_CODE (x) == PARALLEL)
5032 {
19cb6b50 5033 int j;
15bbde2b 5034 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5035 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5036 return CALL_INSN;
5037 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5038 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5039 return JUMP_INSN;
5040 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5041 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5042 return CALL_INSN;
5043 }
5044 return INSN;
5045}
5046
5047/* Emit the rtl pattern X as an appropriate kind of insn.
5048 If X is a label, it is simply added into the insn chain. */
5049
5050rtx
5051emit (x)
5052 rtx x;
5053{
5054 enum rtx_code code = classify_insn (x);
5055
5056 if (code == CODE_LABEL)
5057 return emit_label (x);
5058 else if (code == INSN)
5059 return emit_insn (x);
5060 else if (code == JUMP_INSN)
5061 {
19cb6b50 5062 rtx insn = emit_jump_insn (x);
b2816317 5063 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
15bbde2b 5064 return emit_barrier ();
5065 return insn;
5066 }
5067 else if (code == CALL_INSN)
5068 return emit_call_insn (x);
5069 else
5070 abort ();
5071}
5072\f
1f3233d1 5073/* Space for free sequence stack entries. */
5074static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
5075
b49854c6 5076/* Begin emitting insns to a sequence which can be packaged in an
5077 RTL_EXPR. If this sequence will contain something that might cause
5078 the compiler to pop arguments to function calls (because those
5079 pops have previously been deferred; see INHIBIT_DEFER_POP for more
5080 details), use do_pending_stack_adjust before calling this function.
5081 That will ensure that the deferred pops are not accidentally
9588521d 5082 emitted in the middle of this sequence. */
15bbde2b 5083
5084void
5085start_sequence ()
5086{
5087 struct sequence_stack *tem;
5088
1f3233d1 5089 if (free_sequence_stack != NULL)
5090 {
5091 tem = free_sequence_stack;
5092 free_sequence_stack = tem->next;
5093 }
5094 else
5095 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
15bbde2b 5096
0a893c29 5097 tem->next = seq_stack;
15bbde2b 5098 tem->first = first_insn;
5099 tem->last = last_insn;
961819fb 5100 tem->sequence_rtl_expr = seq_rtl_expr;
15bbde2b 5101
0a893c29 5102 seq_stack = tem;
15bbde2b 5103
5104 first_insn = 0;
5105 last_insn = 0;
5106}
5107
961819fb 5108/* Similarly, but indicate that this sequence will be placed in T, an
5109 RTL_EXPR. See the documentation for start_sequence for more
5110 information about how to use this function. */
5111
5112void
5113start_sequence_for_rtl_expr (t)
5114 tree t;
5115{
5116 start_sequence ();
5117
5118 seq_rtl_expr = t;
5119}
5120
b49854c6 5121/* Set up the insn chain starting with FIRST as the current sequence,
5122 saving the previously current one. See the documentation for
5123 start_sequence for more information about how to use this function. */
15bbde2b 5124
5125void
5126push_to_sequence (first)
5127 rtx first;
5128{
5129 rtx last;
5130
5131 start_sequence ();
5132
5133 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5134
5135 first_insn = first;
5136 last_insn = last;
5137}
5138
78147e84 5139/* Set up the insn chain from a chain stort in FIRST to LAST. */
5140
5141void
5142push_to_full_sequence (first, last)
5143 rtx first, last;
5144{
5145 start_sequence ();
5146 first_insn = first;
5147 last_insn = last;
5148 /* We really should have the end of the insn chain here. */
5149 if (last && NEXT_INSN (last))
5150 abort ();
5151}
5152
ab74c92f 5153/* Set up the outer-level insn chain
5154 as the current sequence, saving the previously current one. */
5155
5156void
5157push_topmost_sequence ()
5158{
2041cfd9 5159 struct sequence_stack *stack, *top = NULL;
ab74c92f 5160
5161 start_sequence ();
5162
0a893c29 5163 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5164 top = stack;
5165
5166 first_insn = top->first;
5167 last_insn = top->last;
961819fb 5168 seq_rtl_expr = top->sequence_rtl_expr;
ab74c92f 5169}
5170
5171/* After emitting to the outer-level insn chain, update the outer-level
5172 insn chain, and restore the previous saved state. */
5173
5174void
5175pop_topmost_sequence ()
5176{
2041cfd9 5177 struct sequence_stack *stack, *top = NULL;
ab74c92f 5178
0a893c29 5179 for (stack = seq_stack; stack; stack = stack->next)
ab74c92f 5180 top = stack;
5181
5182 top->first = first_insn;
5183 top->last = last_insn;
961819fb 5184 /* ??? Why don't we save seq_rtl_expr here? */
ab74c92f 5185
5186 end_sequence ();
5187}
5188
15bbde2b 5189/* After emitting to a sequence, restore previous saved state.
5190
b49854c6 5191 To get the contents of the sequence just made, you must call
31d3e01c 5192 `get_insns' *before* calling here.
b49854c6 5193
5194 If the compiler might have deferred popping arguments while
5195 generating this sequence, and this sequence will not be immediately
5196 inserted into the instruction stream, use do_pending_stack_adjust
31d3e01c 5197 before calling get_insns. That will ensure that the deferred
b49854c6 5198 pops are inserted into this sequence, and not into some random
5199 location in the instruction stream. See INHIBIT_DEFER_POP for more
5200 information about deferred popping of arguments. */
15bbde2b 5201
5202void
5203end_sequence ()
5204{
0a893c29 5205 struct sequence_stack *tem = seq_stack;
15bbde2b 5206
5207 first_insn = tem->first;
5208 last_insn = tem->last;
961819fb 5209 seq_rtl_expr = tem->sequence_rtl_expr;
0a893c29 5210 seq_stack = tem->next;
15bbde2b 5211
1f3233d1 5212 memset (tem, 0, sizeof (*tem));
5213 tem->next = free_sequence_stack;
5214 free_sequence_stack = tem;
15bbde2b 5215}
5216
78147e84 5217/* This works like end_sequence, but records the old sequence in FIRST
5218 and LAST. */
5219
5220void
5221end_full_sequence (first, last)
5222 rtx *first, *last;
5223{
5224 *first = first_insn;
5225 *last = last_insn;
d4c5e26d 5226 end_sequence ();
78147e84 5227}
5228
15bbde2b 5229/* Return 1 if currently emitting into a sequence. */
5230
5231int
5232in_sequence_p ()
5233{
0a893c29 5234 return seq_stack != 0;
15bbde2b 5235}
15bbde2b 5236\f
02ebfa52 5237/* Put the various virtual registers into REGNO_REG_RTX. */
5238
5239void
0a893c29 5240init_virtual_regs (es)
5241 struct emit_status *es;
02ebfa52 5242{
0a893c29 5243 rtx *ptr = es->x_regno_reg_rtx;
5244 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5245 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5246 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5247 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5248 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5249}
5250
928d57e3 5251\f
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once:
   parallel arrays mapping each original SCRATCH seen so far to its
   copy, plus the count of entries in use.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
5270
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (orig)
     rtx orig;
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      /* These codes are always shared, never copied.  */
      return orig;

    case SCRATCH:
      /* If this SCRATCH was copied earlier in the same insn, return the
         same copy; the mapping is kept in the in/out tables above.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information.  We assume that
     all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.
     The memcpy covers everything up to (but not including) the operand
     union, i.e. the code, mode, and flag bits.  */
  memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (GET_RTX_CLASS (code) == 'i')
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand.  The raw field copy below handles the scalar
     formats; rtx and rtvec operands are then deep-copied on top.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->fld[i] = orig->fld[i];
      switch (*format_ptr++)
        {
        case 'e':
          if (XEXP (orig, i) != NULL)
            XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
          break;

        case 'E':
        case 'V':
          /* Share the ASM_OPERANDS vectors recorded below so that all
             references within one insn point at the same copies.  */
          if (XVEC (orig, i) == orig_asm_constraints_vector)
            XVEC (copy, i) = copy_asm_constraints_vector;
          else if (XVEC (orig, i) == orig_asm_operands_vector)
            XVEC (copy, i) = copy_asm_operands_vector;
          else if (XVEC (orig, i) != NULL)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
            }
          break;

        case 't':
        case 'w':
        case 'i':
        case 's':
        case 'S':
        case 'u':
        case '0':
          /* These are left unchanged.  (Already handled by the raw
             field copy above.)  */
          break;

        default:
          abort ();
        }
    }

  if (code == SCRATCH)
    {
      /* Record the mapping so later references to this SCRATCH within
         the same insn reuse COPY.  */
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
        abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      /* Remember the operand and constraint vectors of the first
         ASM_OPERANDS copied, so sibling ASM_OPERANDS share them.  */
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5407
5408/* Create a new copy of an rtx.
5409 This function differs from copy_rtx in that it handles SCRATCHes and
5410 ASM_OPERANDs properly.
5411 INSN doesn't really have to be a full INSN; it could be just the
5412 pattern. */
5413rtx
5414copy_insn (insn)
5415 rtx insn;
5416{
5417 copy_insn_n_scratches = 0;
5418 orig_asm_operands_vector = 0;
5419 orig_asm_constraints_vector = 0;
5420 copy_asm_operands_vector = 0;
5421 copy_asm_constraints_vector = 0;
5422 return copy_insn_1 (insn);
5423}
02ebfa52 5424
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit ()
{
  struct function *f = cfun;

  /* Allocate a fresh emit status and reset the per-function state.  */
  f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location.line = 0;
  last_location.file = 0;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  /* +101 leaves room for pseudos beyond the virtual registers before
     the tables must grow — presumably a growth-avoidance heuristic;
     TODO confirm against the table-resizing code elsewhere.  */
  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
					   * sizeof (unsigned char));

  regno_reg_rtx
    = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  /* Record the known alignment of the stack-related registers; the
     CFA register is only known to be word-aligned.  */
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5494
89dd3424 5495/* Generate the constant 0. */
886cfd4f 5496
5497static rtx
89dd3424 5498gen_const_vector_0 (mode)
886cfd4f 5499 enum machine_mode mode;
5500{
5501 rtx tem;
5502 rtvec v;
5503 int units, i;
5504 enum machine_mode inner;
5505
5506 units = GET_MODE_NUNITS (mode);
5507 inner = GET_MODE_INNER (mode);
5508
5509 v = rtvec_alloc (units);
5510
5511 /* We need to call this function after we to set CONST0_RTX first. */
5512 if (!CONST0_RTX (inner))
5513 abort ();
5514
5515 for (i = 0; i < units; ++i)
5516 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5517
9426b612 5518 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
886cfd4f 5519 return tem;
5520}
5521
9426b612 5522/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5523 all elements are zero. */
5524rtx
5525gen_rtx_CONST_VECTOR (mode, v)
5526 enum machine_mode mode;
5527 rtvec v;
5528{
5529 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5530 int i;
5531
5532 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5533 if (RTVEC_ELT (v, i) != inner_zero)
5534 return gen_rtx_raw_CONST_VECTOR (mode, v);
5535 return CONST0_RTX (mode);
5536}
5537
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (line_numbers)
     int line_numbers;
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the byte, word, and double modes: the narrowest integer
     modes matching BITS_PER_UNIT and BITS_PER_WORD, and the narrowest
     float mode matching DOUBLE_TYPE_SIZE.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx (PC, VOIDmode);
  cc0_rtx = gen_rtx (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx here since gen_rtx in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* const_true_rtx shares the cached CONST_INT when STORE_FLAG_VALUE
     falls inside the cached range.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  /* 0.5 is 1.0 with the exponent decremented by one.  */
  dconsthalf = dconst1;
  dconsthalf.exp--;

  /* Build the const_tiny_rtx table: shared 0, 1, and 2 constants for
     every float, integer, and partial-integer mode.  */
  for (i = 0; i <= 2; i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Shared zero vectors for the vector modes.  Scalar zeros above must
     exist first; gen_const_vector_0 aborts otherwise.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* Target-dependent registers: aggregate-return address, static
     chain, and PIC offset table.  */
#ifdef STRUCT_VALUE
  struct_value_rtx = STRUCT_VALUE;
#else
  struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
#endif

#ifdef STRUCT_VALUE_INCOMING
  struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
#else
#ifdef STRUCT_VALUE_INCOMING_REGNUM
  struct_value_incoming_rtx
    = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
#else
  struct_value_incoming_rtx = struct_value_rtx;
#endif
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

  /* STATIC_CHAIN, if defined, overrides STATIC_CHAIN_REGNUM.  */
#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
ac6c481d 5738\f
5739/* Query and clear/ restore no_line_numbers. This is used by the
5740 switch / case handling in stmt.c to give proper line numbers in
5741 warnings about unreachable code. */
5742
5743int
5744force_line_numbers ()
5745{
5746 int old = no_line_numbers;
5747
5748 no_line_numbers = 0;
5749 if (old)
5750 force_next_line_note ();
5751 return old;
5752}
5753
/* Restore the line-number suppression flag to OLD_VALUE, as previously
   returned by force_line_numbers.  */

void
restore_line_number_status (old_value)
     int old_value;
{
  no_line_numbers = old_value;
}
cd0fe062 5760
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any enclosing libcall region, if present.  */

rtx
emit_copy_of_insn_after (insn, after)
     rtx insn, after;
{
  rtx new;
  rtx note1, note2, link;

  /* Emit a copy of the pattern with the emit function matching the
     kind of insn being duplicated; calls also need their function
     usage and call flags carried over.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  Note each copied note is prepended to REG_NOTES (new),
     so the copy's note list ends up in reverse order relative to the
     original's.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences: if the copy carries a REG_RETVAL note,
     walk back to the insn bearing the matching REG_LIBCALL note and
     repoint the pair at each other.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
1f3233d1 5828
5829#include "gt-emit-rtl.h"