/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "fixed-value.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];
/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */

static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
/* Record fixed-point constant 0 and 1.  */

FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */

rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */

rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;
#define first_insn (crtl->emit.x_first_insn)
#define last_insn (crtl->emit.x_last_insn)
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */

int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace,
	       enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   DECL and OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this point.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimation because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary
	 parts, which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */
  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
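/* Illustrative sketch (not part of the original source): the SImode
   lowpart of a zero-extension from SImode is just the extended object
   itself.  */
#if 0
  rtx r = gen_reg_rtx (SImode);
  rtx x = gen_rtx_ZERO_EXTEND (DImode, r);
  gcc_assert (gen_lowpart_common (SImode, x) == r);
#endif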
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode,
		   rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return the offset in bytes to get the OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));

      return new_rtx;
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	   || !CONST_INT_P (MEM_OFFSET (mem))
	   || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
	       < align))
	 return -1;
       else
	 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
	if (integer_zerop (TREE_OPERAND (t, 1)))
	  /* We don't know anything about the alignment.  */
	  align = BITS_PER_UNIT;
	else
	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	{
	  tree base_type = TREE_TYPE (base);
	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
		      || DECL_ARTIFICIAL (base));
	  MEM_READONLY_P (ref) = 1;
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }

	  /* If this is an indirect reference, record it.  */
	  else if (TREE_CODE (t) == INDIRECT_REF
		   || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	    {
	      expr = t;
	      offset = const0_rtx;
	      apply_bitpos = bitpos;
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == INDIRECT_REF
	       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align
	    = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
	 we're overlapping.  */
      offset = NULL;
      expr = NULL;
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
		     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem),
				   MEM_ALIGN (mem), addrspace,
				   GET_MODE (mem));
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem),
		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
		     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  int pbits;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new_rtx) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref),
                                       MEM_EXPR (memref),
                                       memoffset, size, memalign, as,
                                       GET_MODE (new_rtx));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}
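
/* A minimal usage sketch (not part of GCC): read a DImode MEM as two
   SImode words.  adjust_address is the usual wrapper macro (defined in a
   header elsewhere) that calls adjust_address_1 with VALIDATE and ADJUST
   both nonzero; the EXAMPLE_* name is hypothetical.  */
#if 0
static void
example_split_dimode_mem (rtx mem, rtx *lo, rtx *hi)
{
  *lo = adjust_address (mem, SImode, 0);                       /* low word */
  *hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));  /* high word */
}
#endif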
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
                     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
                     as, GET_MODE (new_rtx));
  return new_rtx;
}
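
/* A minimal usage sketch (not part of GCC): index a MEM by a run-time
   offset held in a register.  POW2 is 4 here on the assumption that the
   offset is a multiple of 4; the EXAMPLE_* name is hypothetical.  */
#if 0
static rtx
example_index_mem (rtx mem, rtx offset_reg)
{
  return offset_address (mem, offset_reg, 4);
}
#endif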
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new_rtx);
  rtx memoffset = MEM_OFFSET (new_rtx);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (expr, 1);
          tree offset = component_ref_field_offset (expr);

          if (! DECL_SIZE_UNIT (field))
            {
              expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && INTVAL (memoffset) >= 0)
            break;

          if (! host_integerp (offset, 1))
            {
              expr = NULL_TREE;
              break;
            }

          expr = TREE_OPERAND (expr, 0);
          memoffset
            = (GEN_INT (INTVAL (memoffset)
                        + tree_low_cst (offset, 1)
                        + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                           / BITS_PER_UNIT)));
        }
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
               && DECL_SIZE_UNIT (expr)
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
               && (! memoffset || INTVAL (memoffset) >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          expr = NULL_TREE;
          break;
        }
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
                                       MEM_ALIGN (new_rtx),
                                       MEM_ADDR_SPACE (new_rtx), mode);
  return new_rtx;
}
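
/* A minimal usage sketch (not part of GCC): a target without byte loads
   could replace a QImode reference with the enclosing SImode word and
   then extract the byte itself; the EXAMPLE_* name is hypothetical.  */
#if 0
static rtx
example_widen_byte_ref (rtx byte_mem)
{
  /* Re-read the enclosing word; the caller must mask out the byte.  */
  return widen_memory_access (byte_mem, SImode, 0);
}
#endif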
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  TREE_THIS_NOTRAP (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
                                  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  alias_set_type alias;
  rtx addr, offset;
  tree expr;

  expr = get_spill_slot_decl (true);
  alias = MEM_ALIAS_SET (DECL_RTL (expr));

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  offset = const0_rtx;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    offset = XEXP (addr, 1);

  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   ADDR_SPACE_GENERIC, GET_MODE (mem));
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}
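
/* A minimal usage sketch (not part of GCC): allocate a fresh label and
   emit it; emit_jump is assumed from the expander elsewhere in the
   middle end, and the EXAMPLE_* name is hypothetical.  */
#if 0
static void
example_forward_jump (void)
{
  rtx label = gen_label_rtx ();  /* unique label, not yet in the chain */
  emit_jump (label);             /* branch forward to it */
  emit_label (label);            /* and place it here */
}
#endif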
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

static unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      return;
      /* SCRATCH must be shared because they represent distinct values.  */
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;
      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (GET_CODE (PATTERN (p)) == SEQUENCE)
          {
            int i;
            rtx q, sequence = PATTERN (p);

            for (i = 0; i < XVECLEN (sequence, 0); i++)
              {
                q = XVECEXP (sequence, 0, i);
                gcc_assert (INSN_P (q));
                reset_used_flags (PATTERN (q));
                reset_used_flags (REG_NOTES (q));
              }
          }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        verify_rtx_sharing (PATTERN (p), p);
        verify_rtx_sharing (REG_NOTES (p), p);
      }
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          reset_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            reset_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          set_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            set_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}
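
/* A minimal usage sketch (not part of GCC) of the marking protocol the
   routines above implement: clear the USED bits first, then let
   copy_rtx_if_shared replace anything reached twice.  The EXAMPLE_* name
   is hypothetical.  */
#if 0
static rtx
example_unshare_expr (rtx x)
{
  reset_used_flags (x);           /* start with clean mark bits */
  return copy_rtx_if_shared (x);  /* copy doubly-reached subexpressions */
}
#endif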
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  return first_insn;
}

/* Specify a new insn as the first in the chain.  */

void
set_first_insn (rtx insn)
{
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  return last_insn;
}

/* Specify a new insn as the last in the chain.  */

void
set_last_insn (rtx insn)
{
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;

  if (last_insn)
    return last_insn;
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0, 0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0,
                            XVECLEN (PATTERN (insn), 0) - 1);
        }
    }

  return insn;
}
/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
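
/* A minimal usage sketch (not part of GCC): count the insns that "really
   do something" by chaining next_active_insn.  The EXAMPLE_* name is
   hypothetical.  */
#if 0
static int
example_count_active_insns (void)
{
  int n = 0;
  rtx insn = get_insns ();

  if (insn && !active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    n++;
  return n;
}
#endif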
/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* Update the debug information for the CALL_INSN.  */
            if (flag_enable_icf_debug)
              (*debug_hooks->copy_call_info) (trial, insn);
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
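
/* A minimal usage sketch (not part of GCC): offer INSN's pattern to the
   machine-description splitters; if none matches, try_split hands back
   the original insn unchanged.  The EXAMPLE_* name is hypothetical.  */
#if 0
static rtx
example_split_insn (rtx insn)
{
  return try_split (PATTERN (insn), insn, /*last=*/0);
}
#endif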
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   basic block from BEFORE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */

void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}
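
/* A minimal usage sketch (not part of GCC): the usual rollback idiom
   built on get_last_insn/delete_insns_since.  recog_memoized is assumed
   from the recognizer; the EXAMPLE_* name is hypothetical.  */
#if 0
static bool
example_try_emit (rtx pat)
{
  rtx last = get_last_insn ();
  rtx insn = emit_insn (pat);

  if (recog_memoized (insn) < 0)
    {
      delete_insns_since (last);  /* throw the attempt away */
      return false;
    }
  return true;
}
#endif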
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
/* Same as function above, but take care to update BB boundaries.  */

void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */
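
/* A compilable form of the sketch above (not part of GCC); the emit_use
   call merely stands in for whatever new instructions are generated, and
   the EXAMPLE_* name is hypothetical.  */
#if 0
static void
example_emit_before_spot (rtx spot)
{
  rtx insns_head;

  start_sequence ();
  emit_use (const0_rtx);  /* ... emit the new instructions ... */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}
#endif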
/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;

  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;

  return last;
}
4335 BB is NULL, an attempt is made to infer the BB from AFTER. */
4338 emit_insn_after_noloc (rtx x
, rtx after
, basic_block bb
)
4347 switch (GET_CODE (x
))
4356 last
= emit_insn_after_1 (x
, after
, bb
);
4359 #ifdef ENABLE_RTL_CHECKING
4366 last
= make_insn_raw (x
);
4367 add_insn_after (last
, after
, bb
);
4375 /* Make an insn of code JUMP_INSN with body X
4376 and output it after the insn AFTER. */
4379 emit_jump_insn_after_noloc (rtx x
, rtx after
)
4385 switch (GET_CODE (x
))
4394 last
= emit_insn_after_1 (x
, after
, NULL
);
4397 #ifdef ENABLE_RTL_CHECKING
4404 last
= make_jump_insn_raw (x
);
4405 add_insn_after (last
, after
, NULL
);
4412 /* Make an instruction with body X and code CALL_INSN
4413 and output it after the instruction AFTER. */
4416 emit_call_insn_after_noloc (rtx x
, rtx after
)
4422 switch (GET_CODE (x
))
4431 last
= emit_insn_after_1 (x
, after
, NULL
);
4434 #ifdef ENABLE_RTL_CHECKING
4441 last
= make_call_insn_raw (x
);
4442 add_insn_after (last
, after
, NULL
);
4449 /* Make an instruction with body X and code CALL_INSN
4450 and output it after the instruction AFTER. */
4453 emit_debug_insn_after_noloc (rtx x
, rtx after
)
4459 switch (GET_CODE (x
))
4468 last
= emit_insn_after_1 (x
, after
, NULL
);
4471 #ifdef ENABLE_RTL_CHECKING
4478 last
= make_debug_insn_raw (x
);
4479 add_insn_after (last
, after
, NULL
);
/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_insn_after_noloc (pattern, after, NULL);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* like emit_insn_before_noloc, but set insn_locator according to scope.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* like emit_insn_before_noloc, but set insn_locator according to scope.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* like emit_call_insn_before_noloc,
   but set insn_locator according to before.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* like emit_insn_before_noloc, but set insn_locator according to scope.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* like emit_debug_insn_before_noloc,
   but set insn_locator according to before.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before,
                                          INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
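/* A typical call (sketch; MODE, A and B stand for caller operands):
   after expanding a multiply into several insns, record the simple
   form on the last one so later passes can simplify against it:

     set_unique_reg_note (insn, REG_EQUAL, gen_rtx_MULT (mode, a, b));

   An existing REG_EQUAL note on INSN is overwritten, not duplicated.  */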
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
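/* For example, (set (pc) (label_ref ...)) classifies as JUMP_INSN and
   (set (reg) (call ...)) as CALL_INSN, while an unrecognized body
   falls through to plain INSN.  */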
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = GGC_NEW (struct sequence_stack);

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  first_insn = first;
  last_insn = last;
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
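/* The canonical pairing (sketch; DEST, SRC and INSN are placeholders
   for caller-provided rtx): emit into a detached sequence, capture
   it, then splice it where it belongs:

     start_sequence ();
     emit_insn (gen_rtx_SET (VOIDmode, dest, src));
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insn);  */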
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

static rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
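/* Usage sketch: duplicating an insn's pattern before re-emitting it,

     rtx pat = copy_insn (PATTERN (insn));
     rtx new_insn = emit_insn_after (pat, after);

   Unlike copy_rtx, which shares SCRATCHes outright, copy_insn gives
   the copy fresh SCRATCHes while keeping multiple references to one
   original SCRATCH consistent within the copy.  */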
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  first_insn = NULL;
  last_insn = NULL;
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          static_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
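/* Sketch of the effect (V4SImode assumed available on the target):

     rtvec v = rtvec_alloc (4);
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);

   returns the shared CONST0_RTX (V4SImode) rather than a fresh
   CONST_VECTOR; a vector with mixed elements would instead go through
   gen_rtx_raw_CONST_VECTOR.  */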
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1(mode).data.low,
                     &FCONST1(mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }
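  /* Why the lshift_double above yields 1.0: a fixed-point mode with
     FBIT fractional bits represents a value V as the integer
     V * 2**FBIT, so the constant 1 is just 1 << FBIT, spread across
     the low and high words of the double-word value.  */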
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0(mode).data.high = 0;
      FCONST0(mode).data.low = 0;
      FCONST0(mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1(mode).data.high = 0;
      FCONST1(mode).data.low = 0;
      FCONST1(mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1(mode).data.low,
                     &FCONST1(mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
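/* Illustrative use (an assumed caller context, e.g. basic-block
   duplication): copy each insn of a block in order, tracking the
   current copy point in AFTER:

     for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
          insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         after = emit_copy_of_insn_after (insn, after);

   BB_HEAD and BB_END come from basic-block.h.  */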
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
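/* Usage sketch: an expander that must expose a hard-register
   overwrite to the optimizers can emit

     emit_insn (gen_hard_reg_clobber (word_mode, regno));

   and repeated requests for the same (mode, regno) pair get the one
   cached CLOBBER back.  */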
#include "gt-emit-rtl.h"