/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "coretypes.h"
#include "stringpool.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;
/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
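
/* Illustrative note (not part of the original source): hash_table drives
   lookups through the hasher's static members, so a query such as

     rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						      INSERT);

   (as done in gen_rtx_CONST_INT below) compares candidate entries with
   const_int_hasher::equal against the HOST_WIDE_INT key ARG.  */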
struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently proceeded by try_split.  */
profile_probability split_branch_probability;
/* Returns a hash code for X (which is a really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}
/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is a really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}
/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}
rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
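
/* Illustrative sketch (not in the original source): because small constants
   come from the const_int_rtx cache above and larger ones are interned in
   const_int_htab, CONST_INTs can be compared by pointer identity:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 5);
     rtx b = GEN_INT (5);
     gcc_checking_assert (a == b);	// both are the shared (const_int 5)
   */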
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
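
/* Worked example (illustrative only): gen_int_mode truncates C to MODE
   before interning it, so the QImode view of 0x100 is the shared zero:

     rtx x = gen_int_mode (0x100, QImode);	// low 8 bits are 0
     gcc_checking_assert (x == const0_rtx);
   */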
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif
#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
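
/* Worked example (illustrative only), assuming a 64-bit HOST_WIDE_INT:
   immed_double_const (-1, -1, TImode) skips case 1 above (TImode is wider
   than HOST_WIDE_INT) and hits case 2, since I1 consists entirely of
   copies of I0's sign bit; the result is the shared CONST_INT for -1
   rather than a freshly allocated CONST_DOUBLE.  */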
/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints <NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
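
/* Illustrative consequence (not in the original source): when Pmode holds
   the stack pointer, the returned rtx is the shared global, e.g.

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_checking_assert (sp == stack_pointer_rtx);

   whereas a non-Pmode request for the same register allocates a fresh REG
   through gen_raw_REG.  */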
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
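
/* Usage sketch (illustrative only; ADDR is a hypothetical address rtx,
   e.g. a SYMBOL_REF into the constant pool):

     rtx mem = gen_const_mem (SImode, addr);

   The optimizers may then assume the reference neither traps nor is
   ever stored to.  */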
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  unsigned int regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= regsize && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		should be used in different mode in on insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (osize < regsize
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      unsigned int block_size = MIN (isize, regsize);
      unsigned int offset_within_block = offset % block_size;
      if (BYTES_BIG_ENDIAN
	  ? offset_within_block != block_size - osize
	  : offset_within_block != 0)
	return false;
    }
  return true;
}
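
/* Illustrative examples (not part of the original source), on a target
   where word_mode is SImode and REGMODE_NATURAL_SIZE is 4:

     (subreg:SI (reg:DF d) 0)   -- accepted by the word_mode escape hatch
     (subreg:SI (reg:DF d) 4)   -- likewise; offset 4 is SImode-aligned
     (subreg:HI (reg:SF f) 0)   -- rejected: float modes may not change size

   gen_rtx_SUBREG below asserts this predicate, so an invalid combination
   trips at creation time rather than in a later pass.  */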
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and stores within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);

  return subreg_lowpart_offset (outer_mode, inner_mode);
}
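
/* Worked example (illustrative only): on a big-endian target,
   byte_lowpart_offset (QImode, SImode) is 3, since the least significant
   byte of a 4-byte value sits at the highest address; on a little-endian
   target it is 0.  In the paradoxical direction,
   byte_lowpart_offset (SImode, QImode) is -3 on big-endian targets.  */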
/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      unsigned int offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (offset == 0);
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}
/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
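
/* Illustrative sketch (not in the original source): with
   generating_concat_p set, a DCmode request expands to

     (concat:DC (reg:DF pseudo1) (reg:DF pseudo2))

   so the real and imaginary parts can be allocated independently.  */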
/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no-longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (msize > xsize)
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      unsigned int regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs = CEIL (msize, regsize);
      unsigned int xregs = CEIL (xsize, regsize);
      if (mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || const_vec_p (x)
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
1581 /* Otherwise, we can't do this. */
1586 gen_highpart (machine_mode mode
, rtx x
)
1588 unsigned int msize
= GET_MODE_SIZE (mode
);
1591 /* This case loses if X is a subreg. To catch bugs early,
1592 complain if an invalid MODE is used even in other cases. */
1593 gcc_assert (msize
<= UNITS_PER_WORD
1594 || msize
== (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x
)));
1596 result
= simplify_gen_subreg (mode
, x
, GET_MODE (x
),
1597 subreg_highpart_offset (mode
, GET_MODE (x
)));
1598 gcc_assert (result
);
1600 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1601 the target if we have a MEM. gen_highpart must return a valid operand,
1602 emitting code if necessary to do so. */
1605 result
= validize_mem (result
);
1606 gcc_assert (result
);
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes)
{
  if (outer_bytes > inner_bytes)
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}
/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

unsigned int
subreg_size_highpart_offset (unsigned int outer_bytes,
			     unsigned int inner_bytes)
{
  gcc_assert (inner_bytes >= outer_bytes);

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}
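
/* Worked example (illustrative only), for OUTER_BYTES == 4 and
   INNER_BYTES == 8: the lowpart offset is 4 when both BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN are set and 0 when both are clear, while the highpart
   offset is the mirror image (0 and 4 respectively).  Mixed-endian targets
   fall through to subreg_size_offset_from_lsb.  */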
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
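
/* Usage sketch (illustrative only), assuming a 32-bit word target and a
   DImode register OP: operand_subword (op, 0, 1, DImode) yields the
   low-order word on little-endian targets and the high-order word when
   WORDS_BIG_ENDIAN; operand_subword (op, 1, 1, DImode) yields the other
   half.  Asking for word 2 falls outside OP and returns const0_rtx per
   the check above.  */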
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}
/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  poly_uint64 suboffset;
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses it's parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || (TREE_CODE (t2) == COMPONENT_REF
		  /* For trailing arrays t2 doesn't have a size that
		     covers all valid accesses.  */
		  && ! array_at_struct_end_p (t)))
	    {
	      attrs.expr = t;
	      attrs.offset_known_p = false;
	      if (poly_int_tree_p (off_tree, &attrs.offset))
		{
		  attrs.offset_known_p = true;
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
	obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
	attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
/* Like set_mem_attributes_minus_bitpos, but assume the offset is zero.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
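
/* Illustrative sketch (an addition, not from the original source): a
   typical caller expands a tree reference EXP to a MEM and then copies
   the tree-level attributes onto it, roughly:

       rtx mem = gen_rtx_MEM (SImode, addr);   <- ADDR: some valid address
       set_mem_attributes (mem, exp, 1);       <- EXP: the source-level tree

   after which MEM_EXPR, MEM_OFFSET, MEM_ALIGN and MEM_READONLY_P
   describe EXP.  */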
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
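
/* Illustrative sketch (an addition, not from the original source): the
   accessors above all follow the same copy-modify-install pattern on the
   shared mem_attrs structure, e.g.:

       set_mem_align (mem, 32);                    <- now known 32-bit aligned
       set_mem_size (mem, GET_MODE_SIZE (SImode));
       clear_mem_offset (mem);                     <- offset becomes unknown

   Each call copies the current attributes, updates one field and
   reinstalls the result via set_mem_attrs.  */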
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
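
/* Illustrative sketch (an addition, not from the original source): most
   callers reach this through the adjust_address/adjust_address_nv macros,
   e.g. to access the high word of a DImode MEM on a 32-bit big-endian
   target:

       rtx hi = adjust_address (dimem, SImode, 4);

   which offsets the address by 4 bytes, narrows the mode to SImode and
   revalidates the resulting address.  */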
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
			     poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			    size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
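
/* Illustrative sketch (an addition, not from the original source): a
   target without byte loads could widen a QImode reference to word size
   and extract the byte afterwards, roughly:

       rtx wide = widen_memory_access (byte_mem, SImode, 0);
       ... load WIDE, then shift/mask out the byte in the expanders ...

   The helper keeps MEM_EXPR only while the widened access provably stays
   inside the underlying object.  */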
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
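
/* Illustrative sketch (an addition, not from the original source): the
   usual pairing creates the label up front and emits it later at the
   branch target:

       rtx_code_label *lab = gen_label_rtx ();
       emit_jump (lab);
       ... emit the code that is skipped ...
       emit_label (lab);

   The jump machinery updates LABEL_NUSES when the jump is wired to the
   label.  */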
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}

static unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      {
		rtx insn = XVECEXP (pat, 0, i);
		if (INSN_P (insn))
		  reset_insn_used_flags (insn);
	      }
	  }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx insn = XVECEXP (pat, 0, i);
	      if (INSN_P (insn))
		verify_insn_sharing (insn);
	    }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
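
/* Illustrative sketch (an addition, not from the original source): a pass
   that rewrites patterns wholesale typically re-establishes the no-sharing
   invariant and, in checking builds, validates it:

       unshare_all_rtl_in_chain (get_insns ());
       ... transformations ...
       if (flag_checking)
	 verify_rtl_sharing ();
*/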
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }

  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
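
/* Illustrative sketch (an addition, not from the original source): an
   expander can guard an input against being clobbered by an output it is
   about to store, e.g.:

       y = make_safe_from (y, target);    <- copies Y to a pseudo if needed
       emit_move_insn (target, gen_rtx_PLUS (mode, target, y));
*/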
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
	}
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn))
	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	      insn = seq->insn (seq->len () - 1);
	}
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	  insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}
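
/* Illustrative sketch (an addition, not from the original source): a pass
   that only cares about "real" activity within one basic block can walk
   with the _bb variants, which stop at the block boundary:

       for (rtx_insn *p = next_nonnote_nondebug_insn_bb (BB_HEAD (bb));
	    p;
	    p = next_nonnote_nondebug_insn_bb (p))
	 ... process P ...
*/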
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability.initialized_p ()
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_br_prob_note (insn, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx_insn *next;
	    rtx *p;

	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* If the new call is the last instruction in the sequence,
	       it will effectively replace the old call in-situ.  Otherwise
	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
	       so that it comes immediately after the new call.  */
	    if (NEXT_INSN (insn))
	      for (next = NEXT_INSN (trial);
		   next && NOTE_P (next);
		   next = NEXT_INSN (next))
		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		  {
		    remove_insn (next);
		    add_insn_after (next, insn, NULL);
		    break;
		  }
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	case REG_CALL_NOCF_CHECK:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
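
/* Illustrative sketch (an addition, not from the original source): split
   passes hand each candidate insn to try_split and continue from whatever
   comes back:

       rtx_insn *last = try_split (PATTERN (insn), insn, 1);

   If a define_split in the machine description matched, LAST is the final
   insn of the replacement sequence; otherwise it is INSN itself.  */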
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);

  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);

  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
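
/* Illustrative sketch (an addition, not from the original source): to move
   an insn, a pass unlinks it, clears the stale links as the comment above
   requires, and re-adds it elsewhere:

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, new_place, NULL);
*/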
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */
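
/* A concrete instance of the recipe above (illustrative, not from the
   original source); TMP_REG, SRC_MEM, DEST_REG and SPOT are placeholders:

       start_sequence ();
       emit_move_insn (tmp_reg, src_mem);
       emit_move_insn (dest_reg, tmp_reg);
       rtx_insn *head = get_insns ();
       end_sequence ();

       emit_insn_before (head, SPOT);
*/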
static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
					   make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
/* Insert pattern X after insn AFTER, recording basic block BB on the new
   insns if BB is non-null.  MAKE_RAW indicates how to turn X into a real
   insn.  */

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}
/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}
/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
/* Notes require a bit of special handling: some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
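/* Illustrative sketch only (not part of the original file): bracketing
   INSN with an EH region.  Real callers also have to number the region
   through the note fields, which is omitted here.  */

static void ATTRIBUTE_UNUSED
example_bracket_insn_with_eh_notes (rtx_insn *insn)
{
  emit_note_before (NOTE_INSN_EH_REGION_BEG, insn);
  emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}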
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
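/* Illustrative sketch only (not part of the original file): the plain
   emit_insn_after above inherits AFTER's INSN_LOCATION (skipping any
   debug insns); use the _noloc variant to leave the location unset.  The
   register-copy insn is hypothetical.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_insert_copy_after (rtx dest, rtx src, rtx_insn *after)
{
  return emit_insn_after (gen_rtx_SET (dest, src), after);
}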
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc,
			    bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
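/* Illustrative sketch only (not part of the original file): creating a
   fresh label, jumping to it, and finally emitting it.  The raw SET of
   pc is generic RTL; real callers usually go through a target jump
   pattern.  JUMP_LABEL and LABEL_NUSES bookkeeping is shown because
   passes are expected to keep both up to date.  */

static void ATTRIBUTE_UNUSED
example_jump_over (void)
{
  rtx_code_label *label = gen_label_rtx ();
  rtx_insn *jump
    = emit_jump_insn (gen_rtx_SET (pc_rtx,
				   gen_rtx_LABEL_REF (VOIDmode, label)));
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  /* ... emit the code being jumped over ...  */
  emit_label (label);
}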
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}
/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
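/* Illustrative sketch only (not part of the original file): before
   writing only part of DEST, emit a clobber of the whole register so
   dataflow does not treat the untouched bits as live-through.  The
   register-copy insn is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_clobber_then_set (rtx dest, rtx src)
{
  emit_clobber (dest);
  emit_insn (gen_rtx_SET (dest, src));
}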
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
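/* Illustrative sketch only (not part of the original file): an expander
   that computes a value with several insns can record the overall result
   by attaching a REG_EQUAL note to the final insn; the constant 42 is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_record_known_value (rtx_insn *last_insn)
{
  set_unique_reg_note (last_insn, REG_EQUAL, GEN_INT (42));
}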
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (NULL);
  set_last_insn (NULL);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
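/* Illustrative sketch only (not part of the original file): sequences
   nest, so a helper can build insns on the side without disturbing
   whatever chain its caller is emitting into.  The register-copy insn is
   hypothetical.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_build_side_sequence (rtx dest, rtx src)
{
  start_sequence ();
  emit_insn (gen_rtx_SET (dest, src));
  rtx_insn *seq = get_insns ();	/* Must be fetched before end_sequence.  */
  end_sequence ();		/* Restores the enclosing chain.  */
  return seq;
}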
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;

    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
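/* Illustrative sketch only (not part of the original file): duplicating
   an existing insn's pattern elsewhere in the stream.  copy_insn, rather
   than copy_rtx, keeps the SCRATCH and ASM_OPERANDS sharing rules
   described above intact.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_duplicate_after (rtx_insn *insn, rtx_insn *after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}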
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Return true if X is a valid element for a duplicated vector constant
   of the given mode.  */

static bool
valid_for_const_vec_duplicate_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}
/* Like gen_const_vec_duplicate, but ignore const_tiny_rtx.  */

static rtx
gen_const_vec_duplicate_1 (machine_mode mode, rtx el)
{
  int nunits = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (nunits);
  for (int i = 0; i < nunits; ++i)
    RTVEC_ELT (v, i) = el;
  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  if (elt == CONST0_RTX (inner_mode))
    return CONST0_RTX (mode);
  else if (elt == CONST1_RTX (inner_mode))
    return CONST1_RTX (mode);
  else if (elt == CONSTM1_RTX (inner_mode))
    return CONSTM1_RTX (mode);

  return gen_const_vec_duplicate_1 (mode, elt);
}
/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vec_duplicate_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
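/* Illustrative sketch only (not part of the original file): splatting
   const1_rtx across V4SImode yields the shared CONST1_RTX vector, while
   splatting a pseudo register would yield a VEC_DUPLICATE expression.
   V4SImode is assumed to exist on the target.  */

static void ATTRIBUTE_UNUSED
example_vec_duplicate (void)
{
  rtx ones = gen_vec_duplicate (V4SImode, const1_rtx);
  gcc_checking_assert (ones == CONST1_RTX (V4SImode));
}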
/* A subroutine of const_vec_series_p that handles the case in which
   X is known to be an integer CONST_VECTOR.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  unsigned int nelts = CONST_VECTOR_NUNITS (x);
  if (nelts < 2)
    return false;

  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  for (unsigned int i = 2; i < nelts; ++i)
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ELT (x, i),
					    CONST_VECTOR_ELT (x, i - 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (CONSTANT_P (base) && CONSTANT_P (step));

  int nunits = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (nunits);
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  RTVEC_ELT (v, 0) = base;
  for (int i = 1; i < nunits; ++i)
    RTVEC_ELT (v, i) = simplify_gen_binary (PLUS, inner_mode,
					    RTVEC_ELT (v, i - 1), step);
  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (CONSTANT_P (base) && CONSTANT_P (step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}
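/* Illustrative sketch only (not part of the original file): the index
   vector {0, 1, 2, 3}, expressed as a series with base 0 and step 1;
   V4SImode is assumed to exist on the target.  */

static rtx ATTRIBUTE_UNUSED
example_index_vector (void)
{
  return gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);
}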
/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate_1 (mode, el);
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are one.  */

rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (GET_MODE_NUNITS (mode) == GET_NUM_ELEM (v));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode) ((int) mode + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode) ((int) mode + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
    {
      scalar_mode smode = smode_iter.require ();
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
      const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
    }
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*insn_uid=*/0,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a shared CLOBBER of hard register REGNO in mode MODE, creating
   and caching it on first use.  */

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
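/* Illustrative sketch only (not part of the original file): repeated
   requests for the same (mode, regno) pair return the cached CLOBBER
   instead of allocating a new one; register number 0 is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_shared_clobber (void)
{
  rtx c1 = gen_hard_reg_clobber (word_mode, 0);
  rtx c2 = gen_hard_reg_clobber (word_mode, 0);
  gcc_checking_assert (c1 == c2);
}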
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location information and the last location information,
   so the data structures are built lazily only when instructions in a
   given place are actually needed.  */
static location_t curr_location;
/* Allocate the insn location datastructure.  */

void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of the emit stage, clear the current location.  */

void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set the current location.  */

void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get the current location.  */

location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return the lexical scope block INSN belongs to.  */

tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */

int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */

const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */

expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
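/* Illustrative sketch only (not part of the original file): how a target
   expander might consult this predicate before an atomic operation.  */

static bool ATTRIBUTE_UNUSED
example_needs_release_fence (enum memmodel model)
{
  /* True for RELEASE, ACQ_REL and SEQ_CST: a pre-operation barrier.  */
  return need_atomic_barrier_p (model, true);
}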
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
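/* Illustrative sketch only (not part of the original file): building an
   ASHIFT of REG by three bits with a properly typed shift amount.  */

static rtx ATTRIBUTE_UNUSED
example_shift_left_3 (machine_mode mode, rtx reg)
{
  return gen_rtx_ASHIFT (mode, reg, gen_int_shift_amount (mode, 3));
}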
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}

#include "gt-emit-rtl.h"