/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "coretypes.h"
#include "stringpool.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"

struct target_rtl default_target_rtl;
struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in RTL
   form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx *regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));

  return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			 CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
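/* Illustrative sketch (not from the original file; M1 and M2 are
   hypothetical MEMs): attribute equality is structural, so

     if (mem_attrs_eq_p (get_mem_attrs (m1), get_mem_attrs (m2)))
       ... the two MEMs carry interchangeable attribute data ...

   which is exactly how set_mem_attrs below decides whether a fresh
   ggc-allocated copy is needed.  */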
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
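/* Illustrative example (DECL is a hypothetical declaration): requests
   with equal keys share one structure, so

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);

   leaves a == b, while get_reg_attrs (NULL_TREE, 0) simply returns 0.  */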
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
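/* Illustrative example: the truncation above canonicalizes the value
   for MODE, so on a target whose QImode is 8 bits

     gen_int_mode (255, QImode)

   yields (const_int -1), whereas a raw GEN_INT (255) would not be a
   canonical QImode constant.  */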
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ...  */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
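/* Illustrative cases for the rules above (assuming a 64-bit
   HOST_WIDE_INT): immed_double_const (5, 0, TImode) satisfies case 2
   and collapses to (const_int 5), while 2**64, i.e.
   immed_double_const (0, 1, TImode), really allocates a VOIDmode
   CONST_DOUBLE with CONST_DOUBLE_LOW 0 and CONST_DOUBLE_HIGH 1.  */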
/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints <NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
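/* Illustrative example: outside of reload/LRA, asking for a well-known
   hard register in Pmode returns the shared global rather than a fresh
   rtx, e.g.

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   so such references can be compared by pointer.  */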
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  unsigned int regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= regsize && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		should be used in different mode in on insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (osize < regsize
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      unsigned int block_size = MIN (isize, regsize);
      unsigned int offset_within_block = offset % block_size;
      if (BYTES_BIG_ENDIAN
	  ? offset_within_block != block_size - osize
	  : offset_within_block != 0)
	return false;
    }
  return true;
}
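/* Illustrative examples of the rules above: on a target whose word_mode
   is SImode, (subreg:SI (reg:DF) 0) is let through by the word_mode
   escape hatch, while (subreg:HI (reg:DF) 0) is rejected outside LRA
   because subregs involving floating point modes must not change
   size.  */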
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);

  return subreg_lowpart_offset (outer_mode, inner_mode);
}
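/* Illustrative example: for an SImode lowpart of a DImode value the
   result is 0 on little-endian targets and 4 on big-endian targets;
   for the paradoxical pair (outer DImode, inner SImode) it is 0 or -4
   respectively.  */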
/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

int
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      unsigned int offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (offset == 0);
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

int
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
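/* Illustrative example: with generating_concat_p set, a complex-mode
   request such as gen_reg_rtx (DCmode) does not create one DCmode
   pseudo; it returns (concat:DC (reg:DF) (reg:DF)) built from two
   independent pseudos, per the MODE_COMPLEX_* branch above.  */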
/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}

/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (msize > xsize)
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs = CEIL (msize, regsize);
      unsigned int xregs = CEIL (xsize, regsize);
      if (mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || const_vec_p (x)
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
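/* Illustrative example: gen_lowpart_common (HImode, x) where x is
   (sign_extend:SI (reg:HI r)) just returns (reg:HI r) -- the
   from_mode == int_mode case above -- while for a plain pseudo it
   falls through to lowpart_subreg.  */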
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
{
  if (outer_bytes > inner_bytes)
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}

/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_highpart_offset (unsigned int outer_bytes,
			     unsigned int inner_bytes)
{
  gcc_assert (inner_bytes >= outer_bytes);

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
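/* Illustrative example: with an 8-byte inner value and a 4-byte outer
   mode, the lowpart offset is 0 and the highpart offset 4 on a fully
   little-endian target; a fully big-endian target swaps the two.  */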
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
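/* Illustrative example: on a target with 4-byte words,
   operand_subword (op, 1, 1, DImode) yields the second word_mode chunk
   of a DImode operand -- its most significant half unless
   WORDS_BIG_ENDIAN, in which case word 0 is the most significant.  */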
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
1876 /* Given REF (a MEM) and T, either the type of X or the expression
1877 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1878 if we are making a new object of this type. BITPOS is nonzero if
1879 there is an offset outstanding on T that will be applied later. */
1882 set_mem_attributes_minus_bitpos (rtx ref
, tree t
, int objectp
,
1883 HOST_WIDE_INT bitpos
)
1885 HOST_WIDE_INT apply_bitpos
= 0;
1887 struct mem_attrs attrs
, *defattrs
, *refattrs
;
1890 /* It can happen that type_for_mode was given a mode for which there
1891 is no language-level type. In which case it returns NULL, which
1896 type
= TYPE_P (t
) ? t
: TREE_TYPE (t
);
1897 if (type
== error_mark_node
)
1900 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1901 wrong answer, as it assumes that DECL_RTL already has the right alias
1902 info. Callers should not set DECL_RTL until after the call to
1903 set_mem_attributes. */
1904 gcc_assert (!DECL_P (t
) || ref
!= DECL_RTL_IF_SET (t
));
1906 memset (&attrs
, 0, sizeof (attrs
));
1908 /* Get the alias set from the expression or type (perhaps using a
1909 front-end routine) and use it. */
1910 attrs
.alias
= get_alias_set (t
);
1912 MEM_VOLATILE_P (ref
) |= TYPE_VOLATILE (type
);
1913 MEM_POINTER (ref
) = POINTER_TYPE_P (type
);
1915 /* Default values from pre-existing memory attributes if present. */
1916 refattrs
= MEM_ATTRS (ref
);
1919 /* ??? Can this ever happen? Calling this routine on a MEM that
1920 already carries memory attributes should probably be invalid. */
1921 attrs
.expr
= refattrs
->expr
;
1922 attrs
.offset_known_p
= refattrs
->offset_known_p
;
1923 attrs
.offset
= refattrs
->offset
;
1924 attrs
.size_known_p
= refattrs
->size_known_p
;
1925 attrs
.size
= refattrs
->size
;
1926 attrs
.align
= refattrs
->align
;
1929 /* Otherwise, default values from the mode of the MEM reference. */
1932 defattrs
= mode_mem_attrs
[(int) GET_MODE (ref
)];
1933 gcc_assert (!defattrs
->expr
);
1934 gcc_assert (!defattrs
->offset_known_p
);
1936 /* Respect mode size. */
1937 attrs
.size_known_p
= defattrs
->size_known_p
;
1938 attrs
.size
= defattrs
->size
;
1939 /* ??? Is this really necessary? We probably should always get
1940 the size from the type below. */
1942 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1943 if T is an object, always compute the object alignment below. */
1945 attrs
.align
= defattrs
->align
;
1947 attrs
.align
= BITS_PER_UNIT
;
1948 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1949 e.g. if the type carries an alignment attribute. Should we be
1950 able to simply always use TYPE_ALIGN? */
1953 /* We can set the alignment from the type if we are making an object or if
1954 this is an INDIRECT_REF. */
1955 if (objectp
|| TREE_CODE (t
) == INDIRECT_REF
)
1956 attrs
.align
= MAX (attrs
.align
, TYPE_ALIGN (type
));
1958 /* If the size is known, we can set that. */
1959 tree new_size
= TYPE_SIZE_UNIT (type
);
1961 /* The address-space is that of the type. */
1962 as
= TYPE_ADDR_SPACE (type
);
1964 /* If T is not a type, we may be able to deduce some more information about
1970 if (TREE_THIS_VOLATILE (t
))
1971 MEM_VOLATILE_P (ref
) = 1;
1973 /* Now remove any conversions: they don't change what the underlying
1974 object is. Likewise for SAVE_EXPR. */
1975 while (CONVERT_EXPR_P (t
)
1976 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
1977 || TREE_CODE (t
) == SAVE_EXPR
)
1978 t
= TREE_OPERAND (t
, 0);
1980 /* Note whether this expression can trap. */
1981 MEM_NOTRAP_P (ref
) = !tree_could_trap_p (t
);
1983 base
= get_base_address (t
);
1987 && TREE_READONLY (base
)
1988 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
))
1989 && !TREE_THIS_VOLATILE (base
))
1990 MEM_READONLY_P (ref
) = 1;
1992 /* Mark static const strings readonly as well. */
1993 if (TREE_CODE (base
) == STRING_CST
1994 && TREE_READONLY (base
)
1995 && TREE_STATIC (base
))
1996 MEM_READONLY_P (ref
) = 1;
1998 /* Address-space information is on the base object. */
1999 if (TREE_CODE (base
) == MEM_REF
2000 || TREE_CODE (base
) == TARGET_MEM_REF
)
2001 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base
,
2004 as
= TYPE_ADDR_SPACE (TREE_TYPE (base
));
2007 /* If this expression uses it's parent's alias set, mark it such
2008 that we won't change it. */
2009 if (component_uses_parent_alias_set_from (t
) != NULL_TREE
)
2010 MEM_KEEP_ALIAS_SET_P (ref
) = 1;
2012 /* If this is a decl, set the attributes of the MEM from it. */
2016 attrs
.offset_known_p
= true;
2018 apply_bitpos
= bitpos
;
2019 new_size
= DECL_SIZE_UNIT (t
);
2022 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2023 else if (CONSTANT_CLASS_P (t
))
2026 /* If this is a field reference, record it. */
2027 else if (TREE_CODE (t
) == COMPONENT_REF
)
2030 attrs
.offset_known_p
= true;
2032 apply_bitpos
= bitpos
;
2033 if (DECL_BIT_FIELD (TREE_OPERAND (t
, 1)))
2034 new_size
= DECL_SIZE_UNIT (TREE_OPERAND (t
, 1));
2037 /* If this is an array reference, look for an outer field reference. */
2038 else if (TREE_CODE (t
) == ARRAY_REF
)
2040 tree off_tree
= size_zero_node
;
2041 /* We can't modify t, because we use it at the end of the
2047 tree index
= TREE_OPERAND (t2
, 1);
2048 tree low_bound
= array_ref_low_bound (t2
);
2049 tree unit_size
= array_ref_element_size (t2
);
2051 /* We assume all arrays have sizes that are a multiple of a byte.
2052 First subtract the lower bound, if any, in the type of the
2053 index, then convert to sizetype and multiply by the size of
2054 the array element. */
2055 if (! integer_zerop (low_bound
))
2056 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
2059 off_tree
= size_binop (PLUS_EXPR
,
2060 size_binop (MULT_EXPR
,
2061 fold_convert (sizetype
,
2065 t2
= TREE_OPERAND (t2
, 0);
2067 while (TREE_CODE (t2
) == ARRAY_REF
);
2070 || (TREE_CODE (t2
) == COMPONENT_REF
2071 /* For trailing arrays t2 doesn't have a size that
2072 covers all valid accesses. */
2073 && ! array_at_struct_end_p (t
)))
2076 attrs
.offset_known_p
= false;
2077 if (tree_fits_uhwi_p (off_tree
))
2079 attrs
.offset_known_p
= true;
2080 attrs
.offset
= tree_to_uhwi (off_tree
);
2081 apply_bitpos
= bitpos
;
2084 /* Else do not record a MEM_EXPR. */
2087 /* If this is an indirect reference, record it. */
2088 else if (TREE_CODE (t
) == MEM_REF
2089 || TREE_CODE (t
) == TARGET_MEM_REF
)
2092 attrs
.offset_known_p
= true;
2094 apply_bitpos
= bitpos
;
2097 /* Compute the alignment. */
2098 unsigned int obj_align
;
2099 unsigned HOST_WIDE_INT obj_bitpos
;
2100 get_object_alignment_1 (t
, &obj_align
, &obj_bitpos
);
2101 obj_bitpos
= (obj_bitpos
- bitpos
) & (obj_align
- 1);
2102 if (obj_bitpos
!= 0)
2103 obj_align
= least_bit_hwi (obj_bitpos
);
2104 attrs
.align
= MAX (attrs
.align
, obj_align
);
  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = least_bit_hwi (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
2695 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2698 gen_label_rtx (void)
2700 return as_a
<rtx_code_label
*> (
2701 gen_rtx_CODE_LABEL (VOIDmode
, NULL_RTX
, NULL_RTX
,
2702 NULL
, label_num
++, NULL
));
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}
unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
        SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
         value.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}
/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}
/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
         value.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  else
    copied = 0;
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
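/* Illustrative walk (hypothetical FIRST): stepping with next_insn
   descends into delay-slot SEQUENCEs,

	for (rtx_insn *i = first; i; i = next_insn (i))
	  ...

   whereas stepping with NEXT_INSN alone would skip over the whole
   SEQUENCE group in one hop.  */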
/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}
/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}
/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability.initialized_p ()
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_br_prob_note (insn, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
        case REG_CALL_NOCF_CHECK:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          if (!AUTO_INC_DEC)
            break;

          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}
/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}
/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);

  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
        gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}
/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                           make_jump_insn_raw));
}
/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}
/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}
/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}
/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}
/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
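/* For example, a pass that wants to bracket an insn INSN with
   exception-region markers might write (a sketch; the region-number
   bookkeeping that normally accompanies these notes is omitted):

	rtx_note *beg = emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
	rtx_note *end = emit_note_after (NOTE_INSN_EH_REGION_END, insn);

   Because these note kinds may legitimately sit on a basic block
   boundary, the two helpers above decide per call whether the new note
   gets BLOCK_FOR_INSN set.  */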
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
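/* For example, to append a use of REG right after INSN while letting
   the new insn inherit INSN's source location (a sketch):

	rtx_insn *new_insn
	  = emit_insn_after (gen_rtx_USE (VOIDmode, reg), insn);

   emit_insn_after first skips backwards over any DEBUG_INSNs when
   reading the location, so debug insns do not perturb the locations of
   generated code.  */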
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc,
			    bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */
rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
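/* For example, both of these calls are valid (a sketch):

	emit_insn (gen_rtx_SET (dest, src));	- a bare PATTERN
	emit_insn (insns_head);			- an insn list from a sequence

   In the second form every insn of the list is added in order and the
   last one is returned.  */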
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
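/* For example, a label is typically created and then placed like this
   (a sketch):

	rtx_code_label *label = gen_label_rtx ();
	...
	emit_label (label);

   gen_label_rtx assigns the unique label number, while emit_label
   assigns the INSN_UID and links the label into the chain.  */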
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
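/* For example, to keep the stack pointer visibly live at some point
   (a sketch):

	emit_use (stack_pointer_rtx);

   The gen_clobber and gen_use variants return the insns as a detached
   sequence instead of adding them to the current chain.  */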
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
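/* For example, after emitting a multi-insn computation of a constant
   into a register, a pass might record the final value (a sketch):

	set_unique_reg_note (last_insn, REG_EQUAL, GEN_INT (42));

   Later passes may then substitute the constant for the register
   wherever that is cheaper.  */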
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */
void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
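/* Sequences nest, so code that is itself emitting into a sequence can
   safely open another one.  A typical round trip looks like this
   (a sketch):

	start_sequence ();
	emit_insn (gen_rtx_SET (dest, src));
	rtx_insn *insns = get_insns ();
	end_sequence ();
	emit_insn (insns);

   The inner chain is detached by end_sequence and only becomes part of
   the enclosing chain when it is explicitly re-emitted.  */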
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;

    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
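/* For example, to duplicate the body of INSN for use elsewhere
   (a sketch):

	rtx pat = copy_insn (PATTERN (insn));
	emit_insn (pat);

   Unlike plain copy_rtx, matching SCRATCHes within one copied pattern
   stay shared with each other, and ASM_OPERANDS keep their operand and
   constraint vectors properly shared.  */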
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Return true if X is a valid element for a duplicated vector constant
   of the given mode.  */

static bool
valid_for_const_vec_duplicate_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}
/* Like gen_const_vec_duplicate, but ignore const_tiny_rtx.  */
static rtx
gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
{
  int nunits = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (nunits);
  for (int i = 0; i < nunits; ++i)
    RTVEC_ELT (v, i) = el;
  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  if (elt == CONST0_RTX (inner_mode))
    return CONST0_RTX (mode);
  else if (elt == CONST1_RTX (inner_mode))
    return CONST1_RTX (mode);
  else if (elt == CONSTM1_RTX (inner_mode))
    return CONSTM1_RTX (mode);

  return gen_const_vec_duplicate_1 (mode, elt);
}
/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vec_duplicate_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
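/* For example, assuming the target provides V4SImode, a vector of four
   ones can be built like this (a sketch):

	rtx ones = gen_vec_duplicate (V4SImode, const1_rtx);

   Because const1_rtx is constant, the result here is simply
   CONST1_RTX (V4SImode) rather than a VEC_DUPLICATE expression.  */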
/* A subroutine of const_vec_series_p that handles the case in which
   X is known to be an integer CONST_VECTOR.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  unsigned int nelts = CONST_VECTOR_NUNITS (x);
  if (nelts < 2)
    return false;

  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  for (unsigned int i = 2; i < nelts; ++i)
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ELT (x, i),
					    CONST_VECTOR_ELT (x, i - 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (CONSTANT_P (base) && CONSTANT_P (step));

  int nunits = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (nunits);
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  RTVEC_ELT (v, 0) = base;
  for (int i = 1; i < nunits; ++i)
    RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode,
					    RTVEC_ELT (v, i - 1), step);
  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (CONSTANT_P (base) && CONSTANT_P (step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}
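/* For example, assuming the target provides V4SImode, the constant
   vector {0, 1, 2, 3} can be built like this (a sketch):

	rtx iota = gen_vec_series (V4SImode, const0_rtx, const1_rtx);

   With a non-constant BASE or STEP the result would instead be a
   VEC_SERIES rtx.  */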
/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate_1 (mode, el);
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
    {
      scalar_mode smode = smode_iter.require ();
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
      const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*insn_uid=*/-1,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
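/* For example, a splitter on a target whose instructions implicitly
   destroy a flags register (FLAGS_REGNUM here is a hypothetical hard
   register number) might fetch the cached clobber like this (a sketch):

	rtx clobber = gen_hard_reg_clobber (CCmode, FLAGS_REGNUM);

   The cache matters because the same clobber is requested once per
   split insn and would otherwise allocate a fresh rtx each time.  */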
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location: the data structures
   are built lazily, only when instructions at a given place are
   actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}
/* At the end of the emit stage, clear the current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
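/* For example, a target expanding an atomic store might use this
   predicate like so (a sketch):

	if (need_atomic_barrier_p (model, true))
	  expand_mem_thread_fence (model);
	emit_move_insn (mem, val);
	if (need_atomic_barrier_p (model, false))
	  expand_mem_thread_fence (model);

   With this scheme a SEQ_CST store is fenced on both sides, while a
   RELAXED store gets no fences at all.  */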
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
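/* For example, shifting a value REG left by 8 bits might look like this
   (a sketch):

	rtx amount = gen_int_shift_amount (GET_MODE (reg), 8);
	rtx shifted = gen_rtx_ASHIFT (GET_MODE (reg), reg, amount);

   Using this helper instead of a bare GEN_INT keeps the shift amount
   in a mode that is wide enough on every target.  */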
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}


#include "gt-emit-rtl.h"