1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 /* Middle-to-low level generation of rtx code and insns.
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.c, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
36 #include "coretypes.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
49 #include "diagnostic-core.h"
51 #include "fold-const.h"
60 #include "stor-layout.h"
63 #include "rtx-vector-builder.h"
65 struct target_rtl default_target_rtl
;
67 struct target_rtl
*this_target_rtl
= &default_target_rtl
;
70 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
72 /* Commonly used modes. */
74 scalar_int_mode byte_mode
; /* Mode whose width is BITS_PER_UNIT. */
75 scalar_int_mode word_mode
; /* Mode whose width is BITS_PER_WORD. */
76 scalar_int_mode ptr_mode
; /* Mode whose width is POINTER_SIZE. */
78 /* Datastructures maintained for currently processed function in RTL form. */
80 struct rtl_data x_rtl
;
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85 with length attribute nested in top level structures. */
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
92 static GTY(()) int label_num
= 1;
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
99 rtx const_tiny_rtx
[4][(int) MAX_MACHINE_MODE
];
103 REAL_VALUE_TYPE dconst0
;
104 REAL_VALUE_TYPE dconst1
;
105 REAL_VALUE_TYPE dconst2
;
106 REAL_VALUE_TYPE dconstm1
;
107 REAL_VALUE_TYPE dconsthalf
;
109 /* Record fixed-point constant 0 and 1. */
110 FIXED_VALUE_TYPE fconst0
[MAX_FCONST0
];
111 FIXED_VALUE_TYPE fconst1
[MAX_FCONST1
];
113 /* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
118 rtx const_int_rtx
[MAX_SAVED_CONST_INT
* 2 + 1];
120 /* Standard pieces of rtx, to be substituted directly into things. */
123 rtx simple_return_rtx
;
126 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127 this pointer should normally never be dereferenced), but is required to be
128 distinct from NULL_RTX. Currently used by peephole2 pass. */
129 rtx_insn
*invalid_insn_rtx
;
131 /* A hash table storing CONST_INTs whose absolute value is greater
132 than MAX_SAVED_CONST_INT. */
134 struct const_int_hasher
: ggc_cache_ptr_hash
<rtx_def
>
136 typedef HOST_WIDE_INT compare_type
;
138 static hashval_t
hash (rtx i
);
139 static bool equal (rtx i
, HOST_WIDE_INT h
);
142 static GTY ((cache
)) hash_table
<const_int_hasher
> *const_int_htab
;
144 struct const_wide_int_hasher
: ggc_cache_ptr_hash
<rtx_def
>
146 static hashval_t
hash (rtx x
);
147 static bool equal (rtx x
, rtx y
);
150 static GTY ((cache
)) hash_table
<const_wide_int_hasher
> *const_wide_int_htab
;
152 struct const_poly_int_hasher
: ggc_cache_ptr_hash
<rtx_def
>
154 typedef std::pair
<machine_mode
, poly_wide_int_ref
> compare_type
;
156 static hashval_t
hash (rtx x
);
157 static bool equal (rtx x
, const compare_type
&y
);
160 static GTY ((cache
)) hash_table
<const_poly_int_hasher
> *const_poly_int_htab
;
162 /* A hash table storing register attribute structures. */
163 struct reg_attr_hasher
: ggc_cache_ptr_hash
<reg_attrs
>
165 static hashval_t
hash (reg_attrs
*x
);
166 static bool equal (reg_attrs
*a
, reg_attrs
*b
);
169 static GTY ((cache
)) hash_table
<reg_attr_hasher
> *reg_attrs_htab
;
171 /* A hash table storing all CONST_DOUBLEs. */
172 struct const_double_hasher
: ggc_cache_ptr_hash
<rtx_def
>
174 static hashval_t
hash (rtx x
);
175 static bool equal (rtx x
, rtx y
);
178 static GTY ((cache
)) hash_table
<const_double_hasher
> *const_double_htab
;
180 /* A hash table storing all CONST_FIXEDs. */
181 struct const_fixed_hasher
: ggc_cache_ptr_hash
<rtx_def
>
183 static hashval_t
hash (rtx x
);
184 static bool equal (rtx x
, rtx y
);
187 static GTY ((cache
)) hash_table
<const_fixed_hasher
> *const_fixed_htab
;
189 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191 #define first_label_num (crtl->emit.x_first_label_num)
193 static void set_used_decls (tree
);
194 static void mark_label_nuses (rtx
);
195 #if TARGET_SUPPORTS_WIDE_INT
196 static rtx
lookup_const_wide_int (rtx
);
198 static rtx
lookup_const_double (rtx
);
199 static rtx
lookup_const_fixed (rtx
);
200 static rtx
gen_const_vector (machine_mode
, int);
201 static void copy_rtx_if_shared_1 (rtx
*orig
);
203 /* Probability of the conditional branch currently proceeded by try_split. */
204 profile_probability split_branch_probability
;
206 /* Returns a hash code for X (which is a really a CONST_INT). */
209 const_int_hasher::hash (rtx x
)
211 return (hashval_t
) INTVAL (x
);
214 /* Returns nonzero if the value represented by X (which is really a
215 CONST_INT) is the same as that given by Y (which is really a
219 const_int_hasher::equal (rtx x
, HOST_WIDE_INT y
)
221 return (INTVAL (x
) == y
);
224 #if TARGET_SUPPORTS_WIDE_INT
225 /* Returns a hash code for X (which is a really a CONST_WIDE_INT). */
228 const_wide_int_hasher::hash (rtx x
)
231 unsigned HOST_WIDE_INT hash
= 0;
234 for (i
= 0; i
< CONST_WIDE_INT_NUNITS (xr
); i
++)
235 hash
+= CONST_WIDE_INT_ELT (xr
, i
);
237 return (hashval_t
) hash
;
240 /* Returns nonzero if the value represented by X (which is really a
241 CONST_WIDE_INT) is the same as that given by Y (which is really a
245 const_wide_int_hasher::equal (rtx x
, rtx y
)
250 if (CONST_WIDE_INT_NUNITS (xr
) != CONST_WIDE_INT_NUNITS (yr
))
253 for (i
= 0; i
< CONST_WIDE_INT_NUNITS (xr
); i
++)
254 if (CONST_WIDE_INT_ELT (xr
, i
) != CONST_WIDE_INT_ELT (yr
, i
))
261 /* Returns a hash code for CONST_POLY_INT X. */
264 const_poly_int_hasher::hash (rtx x
)
267 h
.add_int (GET_MODE (x
));
268 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
269 h
.add_wide_int (CONST_POLY_INT_COEFFS (x
)[i
]);
273 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */
276 const_poly_int_hasher::equal (rtx x
, const compare_type
&y
)
278 if (GET_MODE (x
) != y
.first
)
280 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
281 if (CONST_POLY_INT_COEFFS (x
)[i
] != y
.second
.coeffs
[i
])
286 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
288 const_double_hasher::hash (rtx x
)
290 const_rtx
const value
= x
;
293 if (TARGET_SUPPORTS_WIDE_INT
== 0 && GET_MODE (value
) == VOIDmode
)
294 h
= CONST_DOUBLE_LOW (value
) ^ CONST_DOUBLE_HIGH (value
);
297 h
= real_hash (CONST_DOUBLE_REAL_VALUE (value
));
298 /* MODE is used in the comparison, so it should be in the hash. */
299 h
^= GET_MODE (value
);
304 /* Returns nonzero if the value represented by X (really a ...)
305 is the same as that represented by Y (really a ...) */
307 const_double_hasher::equal (rtx x
, rtx y
)
309 const_rtx
const a
= x
, b
= y
;
311 if (GET_MODE (a
) != GET_MODE (b
))
313 if (TARGET_SUPPORTS_WIDE_INT
== 0 && GET_MODE (a
) == VOIDmode
)
314 return (CONST_DOUBLE_LOW (a
) == CONST_DOUBLE_LOW (b
)
315 && CONST_DOUBLE_HIGH (a
) == CONST_DOUBLE_HIGH (b
));
317 return real_identical (CONST_DOUBLE_REAL_VALUE (a
),
318 CONST_DOUBLE_REAL_VALUE (b
));
321 /* Returns a hash code for X (which is really a CONST_FIXED). */
324 const_fixed_hasher::hash (rtx x
)
326 const_rtx
const value
= x
;
329 h
= fixed_hash (CONST_FIXED_VALUE (value
));
330 /* MODE is used in the comparison, so it should be in the hash. */
331 h
^= GET_MODE (value
);
335 /* Returns nonzero if the value represented by X is the same as that
339 const_fixed_hasher::equal (rtx x
, rtx y
)
341 const_rtx
const a
= x
, b
= y
;
343 if (GET_MODE (a
) != GET_MODE (b
))
345 return fixed_identical (CONST_FIXED_VALUE (a
), CONST_FIXED_VALUE (b
));
348 /* Return true if the given memory attributes are equal. */
351 mem_attrs_eq_p (const struct mem_attrs
*p
, const struct mem_attrs
*q
)
357 return (p
->alias
== q
->alias
358 && p
->offset_known_p
== q
->offset_known_p
359 && (!p
->offset_known_p
|| known_eq (p
->offset
, q
->offset
))
360 && p
->size_known_p
== q
->size_known_p
361 && (!p
->size_known_p
|| known_eq (p
->size
, q
->size
))
362 && p
->align
== q
->align
363 && p
->addrspace
== q
->addrspace
364 && (p
->expr
== q
->expr
365 || (p
->expr
!= NULL_TREE
&& q
->expr
!= NULL_TREE
366 && operand_equal_p (p
->expr
, q
->expr
, 0))));
369 /* Set MEM's memory attributes so that they are the same as ATTRS. */
372 set_mem_attrs (rtx mem
, mem_attrs
*attrs
)
374 /* If everything is the default, we can just clear the attributes. */
375 if (mem_attrs_eq_p (attrs
, mode_mem_attrs
[(int) GET_MODE (mem
)]))
382 || !mem_attrs_eq_p (attrs
, MEM_ATTRS (mem
)))
384 MEM_ATTRS (mem
) = ggc_alloc
<mem_attrs
> ();
385 memcpy (MEM_ATTRS (mem
), attrs
, sizeof (mem_attrs
));
389 /* Returns a hash code for X (which is a really a reg_attrs *). */
392 reg_attr_hasher::hash (reg_attrs
*x
)
394 const reg_attrs
*const p
= x
;
398 h
.add_poly_hwi (p
->offset
);
402 /* Returns nonzero if the value represented by X is the same as that given by
406 reg_attr_hasher::equal (reg_attrs
*x
, reg_attrs
*y
)
408 const reg_attrs
*const p
= x
;
409 const reg_attrs
*const q
= y
;
411 return (p
->decl
== q
->decl
&& known_eq (p
->offset
, q
->offset
));
413 /* Allocate a new reg_attrs structure and insert it into the hash table if
414 one identical to it is not already in the table. We are doing this for
418 get_reg_attrs (tree decl
, poly_int64 offset
)
422 /* If everything is the default, we can just return zero. */
423 if (decl
== 0 && known_eq (offset
, 0))
427 attrs
.offset
= offset
;
429 reg_attrs
**slot
= reg_attrs_htab
->find_slot (&attrs
, INSERT
);
432 *slot
= ggc_alloc
<reg_attrs
> ();
433 memcpy (*slot
, &attrs
, sizeof (reg_attrs
));
441 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
442 and to block register equivalences to be seen across this insn. */
447 rtx x
= gen_rtx_ASM_INPUT (VOIDmode
, "");
448 MEM_VOLATILE_P (x
) = true;
454 /* Set the mode and register number of X to MODE and REGNO. */
457 set_mode_and_regno (rtx x
, machine_mode mode
, unsigned int regno
)
459 unsigned int nregs
= (HARD_REGISTER_NUM_P (regno
)
460 ? hard_regno_nregs (regno
, mode
)
462 PUT_MODE_RAW (x
, mode
);
463 set_regno_raw (x
, regno
, nregs
);
466 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
467 don't attempt to share with the various global pieces of rtl (such as
468 frame_pointer_rtx). */
471 gen_raw_REG (machine_mode mode
, unsigned int regno
)
473 rtx x
= rtx_alloc (REG MEM_STAT_INFO
);
474 set_mode_and_regno (x
, mode
, regno
);
475 REG_ATTRS (x
) = NULL
;
476 ORIGINAL_REGNO (x
) = regno
;
480 /* There are some RTL codes that require special attention; the generation
481 functions do the raw handling. If you add to this list, modify
482 special_rtx in gengenrtl.c as well. */
485 gen_rtx_EXPR_LIST (machine_mode mode
, rtx expr
, rtx expr_list
)
487 return as_a
<rtx_expr_list
*> (gen_rtx_fmt_ee (EXPR_LIST
, mode
, expr
,
492 gen_rtx_INSN_LIST (machine_mode mode
, rtx insn
, rtx insn_list
)
494 return as_a
<rtx_insn_list
*> (gen_rtx_fmt_ue (INSN_LIST
, mode
, insn
,
499 gen_rtx_INSN (machine_mode mode
, rtx_insn
*prev_insn
, rtx_insn
*next_insn
,
500 basic_block bb
, rtx pattern
, int location
, int code
,
503 return as_a
<rtx_insn
*> (gen_rtx_fmt_uuBeiie (INSN
, mode
,
504 prev_insn
, next_insn
,
505 bb
, pattern
, location
, code
,
510 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED
, HOST_WIDE_INT arg
)
512 if (arg
>= - MAX_SAVED_CONST_INT
&& arg
<= MAX_SAVED_CONST_INT
)
513 return const_int_rtx
[arg
+ MAX_SAVED_CONST_INT
];
515 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
516 if (const_true_rtx
&& arg
== STORE_FLAG_VALUE
)
517 return const_true_rtx
;
520 /* Look up the CONST_INT in the hash table. */
521 rtx
*slot
= const_int_htab
->find_slot_with_hash (arg
, (hashval_t
) arg
,
524 *slot
= gen_rtx_raw_CONST_INT (VOIDmode
, arg
);
530 gen_int_mode (poly_int64 c
, machine_mode mode
)
532 c
= trunc_int_for_mode (c
, mode
);
533 if (c
.is_constant ())
534 return GEN_INT (c
.coeffs
[0]);
535 unsigned int prec
= GET_MODE_PRECISION (as_a
<scalar_mode
> (mode
));
536 return immed_wide_int_const (poly_wide_int::from (c
, prec
, SIGNED
), mode
);
539 /* CONST_DOUBLEs might be created from pairs of integers, or from
540 REAL_VALUE_TYPEs. Also, their length is known only at run time,
541 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
543 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
544 hash table. If so, return its counterpart; otherwise add it
545 to the hash table and return it. */
547 lookup_const_double (rtx real
)
549 rtx
*slot
= const_double_htab
->find_slot (real
, INSERT
);
556 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
557 VALUE in mode MODE. */
559 const_double_from_real_value (REAL_VALUE_TYPE value
, machine_mode mode
)
561 rtx real
= rtx_alloc (CONST_DOUBLE
);
562 PUT_MODE (real
, mode
);
566 return lookup_const_double (real
);
569 /* Determine whether FIXED, a CONST_FIXED, already exists in the
570 hash table. If so, return its counterpart; otherwise add it
571 to the hash table and return it. */
574 lookup_const_fixed (rtx fixed
)
576 rtx
*slot
= const_fixed_htab
->find_slot (fixed
, INSERT
);
583 /* Return a CONST_FIXED rtx for a fixed-point value specified by
584 VALUE in mode MODE. */
587 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value
, machine_mode mode
)
589 rtx fixed
= rtx_alloc (CONST_FIXED
);
590 PUT_MODE (fixed
, mode
);
594 return lookup_const_fixed (fixed
);
597 #if TARGET_SUPPORTS_WIDE_INT == 0
598 /* Constructs double_int from rtx CST. */
601 rtx_to_double_int (const_rtx cst
)
605 if (CONST_INT_P (cst
))
606 r
= double_int::from_shwi (INTVAL (cst
));
607 else if (CONST_DOUBLE_AS_INT_P (cst
))
609 r
.low
= CONST_DOUBLE_LOW (cst
);
610 r
.high
= CONST_DOUBLE_HIGH (cst
);
619 #if TARGET_SUPPORTS_WIDE_INT
620 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
621 If so, return its counterpart; otherwise add it to the hash table and
625 lookup_const_wide_int (rtx wint
)
627 rtx
*slot
= const_wide_int_htab
->find_slot (wint
, INSERT
);
635 /* Return an rtx constant for V, given that the constant has mode MODE.
636 The returned rtx will be a CONST_INT if V fits, otherwise it will be
637 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
638 (if TARGET_SUPPORTS_WIDE_INT). */
641 immed_wide_int_const_1 (const wide_int_ref
&v
, machine_mode mode
)
643 unsigned int len
= v
.get_len ();
644 /* Not scalar_int_mode because we also allow pointer bound modes. */
645 unsigned int prec
= GET_MODE_PRECISION (as_a
<scalar_mode
> (mode
));
647 /* Allow truncation but not extension since we do not know if the
648 number is signed or unsigned. */
649 gcc_assert (prec
<= v
.get_precision ());
651 if (len
< 2 || prec
<= HOST_BITS_PER_WIDE_INT
)
652 return gen_int_mode (v
.elt (0), mode
);
654 #if TARGET_SUPPORTS_WIDE_INT
658 unsigned int blocks_needed
659 = (prec
+ HOST_BITS_PER_WIDE_INT
- 1) / HOST_BITS_PER_WIDE_INT
;
661 if (len
> blocks_needed
)
664 value
= const_wide_int_alloc (len
);
666 /* It is so tempting to just put the mode in here. Must control
668 PUT_MODE (value
, VOIDmode
);
669 CWI_PUT_NUM_ELEM (value
, len
);
671 for (i
= 0; i
< len
; i
++)
672 CONST_WIDE_INT_ELT (value
, i
) = v
.elt (i
);
674 return lookup_const_wide_int (value
);
677 return immed_double_const (v
.elt (0), v
.elt (1), mode
);
681 #if TARGET_SUPPORTS_WIDE_INT == 0
682 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
683 of ints: I0 is the low-order word and I1 is the high-order word.
684 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
685 implied upper bits are copies of the high bit of i1. The value
686 itself is neither signed nor unsigned. Do not use this routine for
687 non-integer modes; convert to REAL_VALUE_TYPE and use
688 const_double_from_real_value. */
691 immed_double_const (HOST_WIDE_INT i0
, HOST_WIDE_INT i1
, machine_mode mode
)
696 /* There are the following cases (note that there are no modes with
697 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
699 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
701 2) If the value of the integer fits into HOST_WIDE_INT anyway
702 (i.e., i1 consists only from copies of the sign bit, and sign
703 of i0 and i1 are the same), then we return a CONST_INT for i0.
704 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
706 if (is_a
<scalar_mode
> (mode
, &smode
)
707 && GET_MODE_BITSIZE (smode
) <= HOST_BITS_PER_WIDE_INT
)
708 return gen_int_mode (i0
, mode
);
710 /* If this integer fits in one word, return a CONST_INT. */
711 if ((i1
== 0 && i0
>= 0) || (i1
== ~0 && i0
< 0))
714 /* We use VOIDmode for integers. */
715 value
= rtx_alloc (CONST_DOUBLE
);
716 PUT_MODE (value
, VOIDmode
);
718 CONST_DOUBLE_LOW (value
) = i0
;
719 CONST_DOUBLE_HIGH (value
) = i1
;
721 for (i
= 2; i
< (sizeof CONST_DOUBLE_FORMAT
- 1); i
++)
722 XWINT (value
, i
) = 0;
724 return lookup_const_double (value
);
728 /* Return an rtx representation of C in mode MODE. */
731 immed_wide_int_const (const poly_wide_int_ref
&c
, machine_mode mode
)
733 if (c
.is_constant ())
734 return immed_wide_int_const_1 (c
.coeffs
[0], mode
);
736 /* Not scalar_int_mode because we also allow pointer bound modes. */
737 unsigned int prec
= GET_MODE_PRECISION (as_a
<scalar_mode
> (mode
));
739 /* Allow truncation but not extension since we do not know if the
740 number is signed or unsigned. */
741 gcc_assert (prec
<= c
.coeffs
[0].get_precision ());
742 poly_wide_int newc
= poly_wide_int::from (c
, prec
, SIGNED
);
744 /* See whether we already have an rtx for this constant. */
747 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
748 h
.add_wide_int (newc
.coeffs
[i
]);
749 const_poly_int_hasher::compare_type
typed_value (mode
, newc
);
750 rtx
*slot
= const_poly_int_htab
->find_slot_with_hash (typed_value
,
756 /* Create a new rtx. There's a choice to be made here between installing
757 the actual mode of the rtx or leaving it as VOIDmode (for consistency
758 with CONST_INT). In practice the handling of the codes is different
759 enough that we get no benefit from using VOIDmode, and various places
760 assume that VOIDmode implies CONST_INT. Using the real mode seems like
761 the right long-term direction anyway. */
762 typedef trailing_wide_ints
<NUM_POLY_INT_COEFFS
> twi
;
763 size_t extra_size
= twi::extra_size (prec
);
764 x
= rtx_alloc_v (CONST_POLY_INT
,
765 sizeof (struct const_poly_int_def
) + extra_size
);
767 CONST_POLY_INT_COEFFS (x
).set_precision (prec
);
768 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
769 CONST_POLY_INT_COEFFS (x
)[i
] = newc
.coeffs
[i
];
776 gen_rtx_REG (machine_mode mode
, unsigned int regno
)
778 /* In case the MD file explicitly references the frame pointer, have
779 all such references point to the same frame pointer. This is
780 used during frame pointer elimination to distinguish the explicit
781 references to these registers from pseudos that happened to be
784 If we have eliminated the frame pointer or arg pointer, we will
785 be using it as a normal register, for example as a spill
786 register. In such cases, we might be accessing it in a mode that
787 is not Pmode and therefore cannot use the pre-allocated rtx.
789 Also don't do this when we are making new REGs in reload, since
790 we don't want to get confused with the real pointers. */
792 if (mode
== Pmode
&& !reload_in_progress
&& !lra_in_progress
)
794 if (regno
== FRAME_POINTER_REGNUM
795 && (!reload_completed
|| frame_pointer_needed
))
796 return frame_pointer_rtx
;
798 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
799 && regno
== HARD_FRAME_POINTER_REGNUM
800 && (!reload_completed
|| frame_pointer_needed
))
801 return hard_frame_pointer_rtx
;
802 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
803 if (FRAME_POINTER_REGNUM
!= ARG_POINTER_REGNUM
804 && regno
== ARG_POINTER_REGNUM
)
805 return arg_pointer_rtx
;
807 #ifdef RETURN_ADDRESS_POINTER_REGNUM
808 if (regno
== RETURN_ADDRESS_POINTER_REGNUM
)
809 return return_address_pointer_rtx
;
811 if (regno
== (unsigned) PIC_OFFSET_TABLE_REGNUM
812 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
813 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
814 return pic_offset_table_rtx
;
815 if (regno
== STACK_POINTER_REGNUM
)
816 return stack_pointer_rtx
;
820 /* If the per-function register table has been set up, try to re-use
821 an existing entry in that table to avoid useless generation of RTL.
823 This code is disabled for now until we can fix the various backends
824 which depend on having non-shared hard registers in some cases. Long
825 term we want to re-enable this code as it can significantly cut down
826 on the amount of useless RTL that gets generated.
828 We'll also need to fix some code that runs after reload that wants to
829 set ORIGINAL_REGNO. */
834 && regno
< FIRST_PSEUDO_REGISTER
835 && reg_raw_mode
[regno
] == mode
)
836 return regno_reg_rtx
[regno
];
839 return gen_raw_REG (mode
, regno
);
843 gen_rtx_MEM (machine_mode mode
, rtx addr
)
845 rtx rt
= gen_rtx_raw_MEM (mode
, addr
);
847 /* This field is not cleared by the mere allocation of the rtx, so
854 /* Generate a memory referring to non-trapping constant memory. */
857 gen_const_mem (machine_mode mode
, rtx addr
)
859 rtx mem
= gen_rtx_MEM (mode
, addr
);
860 MEM_READONLY_P (mem
) = 1;
861 MEM_NOTRAP_P (mem
) = 1;
865 /* Generate a MEM referring to fixed portions of the frame, e.g., register
869 gen_frame_mem (machine_mode mode
, rtx addr
)
871 rtx mem
= gen_rtx_MEM (mode
, addr
);
872 MEM_NOTRAP_P (mem
) = 1;
873 set_mem_alias_set (mem
, get_frame_alias_set ());
877 /* Generate a MEM referring to a temporary use of the stack, not part
878 of the fixed stack frame. For example, something which is pushed
879 by a target splitter. */
881 gen_tmp_stack_mem (machine_mode mode
, rtx addr
)
883 rtx mem
= gen_rtx_MEM (mode
, addr
);
884 MEM_NOTRAP_P (mem
) = 1;
885 if (!cfun
->calls_alloca
)
886 set_mem_alias_set (mem
, get_frame_alias_set ());
890 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
891 this construct would be valid, and false otherwise. */
894 validate_subreg (machine_mode omode
, machine_mode imode
,
895 const_rtx reg
, poly_uint64 offset
)
897 unsigned int isize
= GET_MODE_SIZE (imode
);
898 unsigned int osize
= GET_MODE_SIZE (omode
);
900 /* All subregs must be aligned. */
901 if (!multiple_p (offset
, osize
))
904 /* The subreg offset cannot be outside the inner object. */
905 if (maybe_ge (offset
, isize
))
908 unsigned int regsize
= REGMODE_NATURAL_SIZE (imode
);
910 /* ??? This should not be here. Temporarily continue to allow word_mode
911 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
912 Generally, backends are doing something sketchy but it'll take time to
914 if (omode
== word_mode
)
916 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
917 is the culprit here, and not the backends. */
918 else if (osize
>= regsize
&& isize
>= osize
)
920 /* Allow component subregs of complex and vector. Though given the below
921 extraction rules, it's not always clear what that means. */
922 else if ((COMPLEX_MODE_P (imode
) || VECTOR_MODE_P (imode
))
923 && GET_MODE_INNER (imode
) == omode
)
925 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
926 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
927 represent this. It's questionable if this ought to be represented at
928 all -- why can't this all be hidden in post-reload splitters that make
929 arbitrarily mode changes to the registers themselves. */
930 else if (VECTOR_MODE_P (omode
) && GET_MODE_INNER (omode
) == imode
)
932 /* Subregs involving floating point modes are not allowed to
933 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
934 (subreg:SI (reg:DF) 0) isn't. */
935 else if (FLOAT_MODE_P (imode
) || FLOAT_MODE_P (omode
))
937 if (! (isize
== osize
938 /* LRA can use subreg to store a floating point value in
939 an integer mode. Although the floating point and the
940 integer modes need the same number of hard registers,
941 the size of floating point mode can be less than the
942 integer mode. LRA also uses subregs for a register
943 should be used in different mode in on insn. */
948 /* Paradoxical subregs must have offset zero. */
950 return known_eq (offset
, 0U);
952 /* This is a normal subreg. Verify that the offset is representable. */
954 /* For hard registers, we already have most of these rules collected in
955 subreg_offset_representable_p. */
956 if (reg
&& REG_P (reg
) && HARD_REGISTER_P (reg
))
958 unsigned int regno
= REGNO (reg
);
960 if ((COMPLEX_MODE_P (imode
) || VECTOR_MODE_P (imode
))
961 && GET_MODE_INNER (imode
) == omode
)
963 else if (!REG_CAN_CHANGE_MODE_P (regno
, imode
, omode
))
966 return subreg_offset_representable_p (regno
, imode
, offset
, omode
);
969 /* For pseudo registers, we want most of the same checks. Namely:
971 Assume that the pseudo register will be allocated to hard registers
972 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
973 the remainder must correspond to the lowpart of the containing hard
974 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
975 otherwise it is at the lowest offset.
977 Given that we've already checked the mode and offset alignment,
978 we only have to check subblock subregs here. */
980 && ! (lra_in_progress
&& (FLOAT_MODE_P (imode
) || FLOAT_MODE_P (omode
))))
982 poly_uint64 block_size
= MIN (isize
, regsize
);
983 unsigned int start_reg
;
984 poly_uint64 offset_within_reg
;
985 if (!can_div_trunc_p (offset
, block_size
, &start_reg
, &offset_within_reg
)
987 ? maybe_ne (offset_within_reg
, block_size
- osize
)
988 : maybe_ne (offset_within_reg
, 0U)))
995 gen_rtx_SUBREG (machine_mode mode
, rtx reg
, poly_uint64 offset
)
997 gcc_assert (validate_subreg (mode
, GET_MODE (reg
), reg
, offset
));
998 return gen_rtx_raw_SUBREG (mode
, reg
, offset
);
1001 /* Generate a SUBREG representing the least-significant part of REG if MODE
1002 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1005 gen_lowpart_SUBREG (machine_mode mode
, rtx reg
)
1007 machine_mode inmode
;
1009 inmode
= GET_MODE (reg
);
1010 if (inmode
== VOIDmode
)
1012 return gen_rtx_SUBREG (mode
, reg
,
1013 subreg_lowpart_offset (mode
, inmode
));
1017 gen_rtx_VAR_LOCATION (machine_mode mode
, tree decl
, rtx loc
,
1018 enum var_init_status status
)
1020 rtx x
= gen_rtx_fmt_te (VAR_LOCATION
, mode
, decl
, loc
);
1021 PAT_VAR_LOCATION_STATUS (x
) = status
;
1026 /* Create an rtvec and stores within it the RTXen passed in the arguments. */
1029 gen_rtvec (int n
, ...)
1037 /* Don't allocate an empty rtvec... */
1044 rt_val
= rtvec_alloc (n
);
1046 for (i
= 0; i
< n
; i
++)
1047 rt_val
->elem
[i
] = va_arg (p
, rtx
);
1054 gen_rtvec_v (int n
, rtx
*argp
)
1059 /* Don't allocate an empty rtvec... */
1063 rt_val
= rtvec_alloc (n
);
1065 for (i
= 0; i
< n
; i
++)
1066 rt_val
->elem
[i
] = *argp
++;
1072 gen_rtvec_v (int n
, rtx_insn
**argp
)
1077 /* Don't allocate an empty rtvec... */
1081 rt_val
= rtvec_alloc (n
);
1083 for (i
= 0; i
< n
; i
++)
1084 rt_val
->elem
[i
] = *argp
++;
1090 /* Return the number of bytes between the start of an OUTER_MODE
1091 in-memory value and the start of an INNER_MODE in-memory value,
1092 given that the former is a lowpart of the latter. It may be a
1093 paradoxical lowpart, in which case the offset will be negative
1094 on big-endian targets. */
1097 byte_lowpart_offset (machine_mode outer_mode
,
1098 machine_mode inner_mode
)
1100 if (paradoxical_subreg_p (outer_mode
, inner_mode
))
1101 return -subreg_lowpart_offset (inner_mode
, outer_mode
);
1103 return subreg_lowpart_offset (outer_mode
, inner_mode
);
1106 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1107 from address X. For paradoxical big-endian subregs this is a
1108 negative value, otherwise it's the same as OFFSET. */
1111 subreg_memory_offset (machine_mode outer_mode
, machine_mode inner_mode
,
1114 if (paradoxical_subreg_p (outer_mode
, inner_mode
))
1116 gcc_assert (known_eq (offset
, 0U));
1117 return -subreg_lowpart_offset (inner_mode
, outer_mode
);
1122 /* As above, but return the offset that existing subreg X would have
1123 if SUBREG_REG (X) were stored in memory. The only significant thing
1124 about the current SUBREG_REG is its mode. */
1127 subreg_memory_offset (const_rtx x
)
1129 return subreg_memory_offset (GET_MODE (x
), GET_MODE (SUBREG_REG (x
)),
1133 /* Generate a REG rtx for a new pseudo register of mode MODE.
1134 This pseudo is assigned the next sequential register number. */
1137 gen_reg_rtx (machine_mode mode
)
1140 unsigned int align
= GET_MODE_ALIGNMENT (mode
);
1142 gcc_assert (can_create_pseudo_p ());
1144 /* If a virtual register with bigger mode alignment is generated,
1145 increase stack alignment estimation because it might be spilled
1147 if (SUPPORTS_STACK_ALIGNMENT
1148 && crtl
->stack_alignment_estimated
< align
1149 && !crtl
->stack_realign_processed
)
1151 unsigned int min_align
= MINIMUM_ALIGNMENT (NULL
, mode
, align
);
1152 if (crtl
->stack_alignment_estimated
< min_align
)
1153 crtl
->stack_alignment_estimated
= min_align
;
1156 if (generating_concat_p
1157 && (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
1158 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
))
1160 /* For complex modes, don't make a single pseudo.
1161 Instead, make a CONCAT of two pseudos.
1162 This allows noncontiguous allocation of the real and imaginary parts,
1163 which makes much better code. Besides, allocating DCmode
1164 pseudos overstrains reload on some machines like the 386. */
1165 rtx realpart
, imagpart
;
1166 machine_mode partmode
= GET_MODE_INNER (mode
);
1168 realpart
= gen_reg_rtx (partmode
);
1169 imagpart
= gen_reg_rtx (partmode
);
1170 return gen_rtx_CONCAT (mode
, realpart
, imagpart
);
1173 /* Do not call gen_reg_rtx with uninitialized crtl. */
1174 gcc_assert (crtl
->emit
.regno_pointer_align_length
);
1176 crtl
->emit
.ensure_regno_capacity ();
1177 gcc_assert (reg_rtx_no
< crtl
->emit
.regno_pointer_align_length
);
1179 val
= gen_raw_REG (mode
, reg_rtx_no
);
1180 regno_reg_rtx
[reg_rtx_no
++] = val
;
1184 /* Make sure m_regno_pointer_align, and regno_reg_rtx are large
1185 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1188 emit_status::ensure_regno_capacity ()
1190 int old_size
= regno_pointer_align_length
;
1192 if (reg_rtx_no
< old_size
)
1195 int new_size
= old_size
* 2;
1196 while (reg_rtx_no
>= new_size
)
1199 char *tmp
= XRESIZEVEC (char, regno_pointer_align
, new_size
);
1200 memset (tmp
+ old_size
, 0, new_size
- old_size
);
1201 regno_pointer_align
= (unsigned char *) tmp
;
1203 rtx
*new1
= GGC_RESIZEVEC (rtx
, regno_reg_rtx
, new_size
);
1204 memset (new1
+ old_size
, 0, (new_size
- old_size
) * sizeof (rtx
));
1205 regno_reg_rtx
= new1
;
1207 crtl
->emit
.regno_pointer_align_length
= new_size
;
1210 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1213 reg_is_parm_p (rtx reg
)
1217 gcc_assert (REG_P (reg
));
1218 decl
= REG_EXPR (reg
);
1219 return (decl
&& TREE_CODE (decl
) == PARM_DECL
);
1222 /* Update NEW with the same attributes as REG, but with OFFSET added
1223 to the REG_OFFSET. */
1226 update_reg_offset (rtx new_rtx
, rtx reg
, poly_int64 offset
)
1228 REG_ATTRS (new_rtx
) = get_reg_attrs (REG_EXPR (reg
),
1229 REG_OFFSET (reg
) + offset
);
1232 /* Generate a register with same attributes as REG, but with OFFSET
1233 added to the REG_OFFSET. */
1236 gen_rtx_REG_offset (rtx reg
, machine_mode mode
, unsigned int regno
,
1239 rtx new_rtx
= gen_rtx_REG (mode
, regno
);
1241 update_reg_offset (new_rtx
, reg
, offset
);
1245 /* Generate a new pseudo-register with the same attributes as REG, but
1246 with OFFSET added to the REG_OFFSET. */
1249 gen_reg_rtx_offset (rtx reg
, machine_mode mode
, int offset
)
1251 rtx new_rtx
= gen_reg_rtx (mode
);
1253 update_reg_offset (new_rtx
, reg
, offset
);
1257 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
1258 new register is a (possibly paradoxical) lowpart of the old one. */
1261 adjust_reg_mode (rtx reg
, machine_mode mode
)
1263 update_reg_offset (reg
, reg
, byte_lowpart_offset (mode
, GET_MODE (reg
)));
1264 PUT_MODE (reg
, mode
);
1267 /* Copy REG's attributes from X, if X has any attributes. If REG and X
1268 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1271 set_reg_attrs_from_value (rtx reg
, rtx x
)
1274 bool can_be_reg_pointer
= true;
1276 /* Don't call mark_reg_pointer for incompatible pointer sign
1278 while (GET_CODE (x
) == SIGN_EXTEND
1279 || GET_CODE (x
) == ZERO_EXTEND
1280 || GET_CODE (x
) == TRUNCATE
1281 || (GET_CODE (x
) == SUBREG
&& subreg_lowpart_p (x
)))
1283 #if defined(POINTERS_EXTEND_UNSIGNED)
1284 if (((GET_CODE (x
) == SIGN_EXTEND
&& POINTERS_EXTEND_UNSIGNED
)
1285 || (GET_CODE (x
) == ZERO_EXTEND
&& ! POINTERS_EXTEND_UNSIGNED
)
1286 || (paradoxical_subreg_p (x
)
1287 && ! (SUBREG_PROMOTED_VAR_P (x
)
1288 && SUBREG_CHECK_PROMOTED_SIGN (x
,
1289 POINTERS_EXTEND_UNSIGNED
))))
1290 && !targetm
.have_ptr_extend ())
1291 can_be_reg_pointer
= false;
1296 /* Hard registers can be reused for multiple purposes within the same
1297 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1298 on them is wrong. */
1299 if (HARD_REGISTER_P (reg
))
1302 offset
= byte_lowpart_offset (GET_MODE (reg
), GET_MODE (x
));
1305 if (MEM_OFFSET_KNOWN_P (x
))
1306 REG_ATTRS (reg
) = get_reg_attrs (MEM_EXPR (x
),
1307 MEM_OFFSET (x
) + offset
);
1308 if (can_be_reg_pointer
&& MEM_POINTER (x
))
1309 mark_reg_pointer (reg
, 0);
1314 update_reg_offset (reg
, x
, offset
);
1315 if (can_be_reg_pointer
&& REG_POINTER (x
))
1316 mark_reg_pointer (reg
, REGNO_POINTER_ALIGN (REGNO (x
)));
1320 /* Generate a REG rtx for a new pseudo register, copying the mode
1321 and attributes from X. */
1324 gen_reg_rtx_and_attrs (rtx x
)
1326 rtx reg
= gen_reg_rtx (GET_MODE (x
));
1327 set_reg_attrs_from_value (reg
, x
);
1331 /* Set the register attributes for registers contained in PARM_RTX.
1332 Use needed values from memory attributes of MEM. */
1335 set_reg_attrs_for_parm (rtx parm_rtx
, rtx mem
)
1337 if (REG_P (parm_rtx
))
1338 set_reg_attrs_from_value (parm_rtx
, mem
);
1339 else if (GET_CODE (parm_rtx
) == PARALLEL
)
1341 /* Check for a NULL entry in the first slot, used to indicate that the
1342 parameter goes both on the stack and in registers. */
1343 int i
= XEXP (XVECEXP (parm_rtx
, 0, 0), 0) ? 0 : 1;
1344 for (; i
< XVECLEN (parm_rtx
, 0); i
++)
1346 rtx x
= XVECEXP (parm_rtx
, 0, i
);
1347 if (REG_P (XEXP (x
, 0)))
1348 REG_ATTRS (XEXP (x
, 0))
1349 = get_reg_attrs (MEM_EXPR (mem
),
1350 INTVAL (XEXP (x
, 1)));
1355 /* Set the REG_ATTRS for registers in value X, given that X represents
1359 set_reg_attrs_for_decl_rtl (tree t
, rtx x
)
1364 if (GET_CODE (x
) == SUBREG
)
1366 gcc_assert (subreg_lowpart_p (x
));
1371 = get_reg_attrs (t
, byte_lowpart_offset (GET_MODE (x
),
1374 : TYPE_MODE (TREE_TYPE (tdecl
))));
1375 if (GET_CODE (x
) == CONCAT
)
1377 if (REG_P (XEXP (x
, 0)))
1378 REG_ATTRS (XEXP (x
, 0)) = get_reg_attrs (t
, 0);
1379 if (REG_P (XEXP (x
, 1)))
1380 REG_ATTRS (XEXP (x
, 1))
1381 = get_reg_attrs (t
, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x
, 0))));
1383 if (GET_CODE (x
) == PARALLEL
)
1387 /* Check for a NULL entry, used to indicate that the parameter goes
1388 both on the stack and in registers. */
1389 if (XEXP (XVECEXP (x
, 0, 0), 0))
1394 for (i
= start
; i
< XVECLEN (x
, 0); i
++)
1396 rtx y
= XVECEXP (x
, 0, i
);
1397 if (REG_P (XEXP (y
, 0)))
1398 REG_ATTRS (XEXP (y
, 0)) = get_reg_attrs (t
, INTVAL (XEXP (y
, 1)));
1403 /* Assign the RTX X to declaration T. */
1406 set_decl_rtl (tree t
, rtx x
)
1408 DECL_WRTL_CHECK (t
)->decl_with_rtl
.rtl
= x
;
1410 set_reg_attrs_for_decl_rtl (t
, x
);
1413 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1414 if the ABI requires the parameter to be passed by reference. */
1417 set_decl_incoming_rtl (tree t
, rtx x
, bool by_reference_p
)
1419 DECL_INCOMING_RTL (t
) = x
;
1420 if (x
&& !by_reference_p
)
1421 set_reg_attrs_for_decl_rtl (t
, x
);
1424 /* Identify REG (which may be a CONCAT) as a user register. */
1427 mark_user_reg (rtx reg
)
1429 if (GET_CODE (reg
) == CONCAT
)
1431 REG_USERVAR_P (XEXP (reg
, 0)) = 1;
1432 REG_USERVAR_P (XEXP (reg
, 1)) = 1;
1436 gcc_assert (REG_P (reg
));
1437 REG_USERVAR_P (reg
) = 1;
1441 /* Identify REG as a probable pointer register and show its alignment
1442 as ALIGN, if nonzero. */
1445 mark_reg_pointer (rtx reg
, int align
)
1447 if (! REG_POINTER (reg
))
1449 REG_POINTER (reg
) = 1;
1452 REGNO_POINTER_ALIGN (REGNO (reg
)) = align
;
1454 else if (align
&& align
< REGNO_POINTER_ALIGN (REGNO (reg
)))
1455 /* We can no-longer be sure just how aligned this pointer is. */
1456 REGNO_POINTER_ALIGN (REGNO (reg
)) = align
;
1459 /* Return 1 plus largest pseudo reg number used in the current function. */
1467 /* Return 1 + the largest label number used so far in the current function. */
1470 max_label_num (void)
1475 /* Return first label number used in this function (if any were used). */
1478 get_first_label_num (void)
1480 return first_label_num
;
1483 /* If the rtx for label was created during the expansion of a nested
1484 function, then first_label_num won't include this label number.
1485 Fix this now so that array indices work later. */
1488 maybe_set_first_label_num (rtx_code_label
*x
)
1490 if (CODE_LABEL_NUMBER (x
) < first_label_num
)
1491 first_label_num
= CODE_LABEL_NUMBER (x
);
1494 /* For use by the RTL function loader, when mingling with normal
1496 Ensure that label_num is greater than the label num of X, to avoid
1497 duplicate labels in the generated assembler. */
1500 maybe_set_max_label_num (rtx_code_label
*x
)
1502 if (CODE_LABEL_NUMBER (x
) >= label_num
)
1503 label_num
= CODE_LABEL_NUMBER (x
) + 1;
1507 /* Return a value representing some low-order bits of X, where the number
1508 of low-order bits is given by MODE. Note that no conversion is done
1509 between floating-point and fixed-point values, rather, the bit
1510 representation is returned.
1512 This function handles the cases in common between gen_lowpart, below,
1513 and two variants in cse.c and combine.c. These are the cases that can
1514 be safely handled at all points in the compilation.
1516 If this is not a case we can handle, return 0. */
1519 gen_lowpart_common (machine_mode mode
, rtx x
)
1521 int msize
= GET_MODE_SIZE (mode
);
1523 machine_mode innermode
;
1525 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1526 so we have to make one up. Yuk. */
1527 innermode
= GET_MODE (x
);
1529 && msize
* BITS_PER_UNIT
<= HOST_BITS_PER_WIDE_INT
)
1530 innermode
= int_mode_for_size (HOST_BITS_PER_WIDE_INT
, 0).require ();
1531 else if (innermode
== VOIDmode
)
1532 innermode
= int_mode_for_size (HOST_BITS_PER_DOUBLE_INT
, 0).require ();
1534 xsize
= GET_MODE_SIZE (innermode
);
1536 gcc_assert (innermode
!= VOIDmode
&& innermode
!= BLKmode
);
1538 if (innermode
== mode
)
1541 if (SCALAR_FLOAT_MODE_P (mode
))
1543 /* Don't allow paradoxical FLOAT_MODE subregs. */
1549 /* MODE must occupy no more of the underlying registers than X. */
1550 unsigned int regsize
= REGMODE_NATURAL_SIZE (innermode
);
1551 unsigned int mregs
= CEIL (msize
, regsize
);
1552 unsigned int xregs
= CEIL (xsize
, regsize
);
1557 scalar_int_mode int_mode
, int_innermode
, from_mode
;
1558 if ((GET_CODE (x
) == ZERO_EXTEND
|| GET_CODE (x
) == SIGN_EXTEND
)
1559 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
1560 && is_a
<scalar_int_mode
> (innermode
, &int_innermode
)
1561 && is_a
<scalar_int_mode
> (GET_MODE (XEXP (x
, 0)), &from_mode
))
1563 /* If we are getting the low-order part of something that has been
1564 sign- or zero-extended, we can either just use the object being
1565 extended or make a narrower extension. If we want an even smaller
1566 piece than the size of the object being extended, call ourselves
1569 This case is used mostly by combine and cse. */
1571 if (from_mode
== int_mode
)
1573 else if (GET_MODE_SIZE (int_mode
) < GET_MODE_SIZE (from_mode
))
1574 return gen_lowpart_common (int_mode
, XEXP (x
, 0));
1575 else if (GET_MODE_SIZE (int_mode
) < GET_MODE_SIZE (int_innermode
))
1576 return gen_rtx_fmt_e (GET_CODE (x
), int_mode
, XEXP (x
, 0));
1578 else if (GET_CODE (x
) == SUBREG
|| REG_P (x
)
1579 || GET_CODE (x
) == CONCAT
|| const_vec_p (x
)
1580 || CONST_DOUBLE_AS_FLOAT_P (x
) || CONST_SCALAR_INT_P (x
)
1581 || CONST_POLY_INT_P (x
))
1582 return lowpart_subreg (mode
, x
, innermode
);
1584 /* Otherwise, we can't do this. */
1589 gen_highpart (machine_mode mode
, rtx x
)
1591 unsigned int msize
= GET_MODE_SIZE (mode
);
1594 /* This case loses if X is a subreg. To catch bugs early,
1595 complain if an invalid MODE is used even in other cases. */
1596 gcc_assert (msize
<= UNITS_PER_WORD
1597 || msize
== (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x
)));
1599 result
= simplify_gen_subreg (mode
, x
, GET_MODE (x
),
1600 subreg_highpart_offset (mode
, GET_MODE (x
)));
1601 gcc_assert (result
);
1603 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1604 the target if we have a MEM. gen_highpart must return a valid operand,
1605 emitting code if necessary to do so. */
1608 result
= validize_mem (result
);
1609 gcc_assert (result
);
1615 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1616 be VOIDmode constant. */
1618 gen_highpart_mode (machine_mode outermode
, machine_mode innermode
, rtx exp
)
1620 if (GET_MODE (exp
) != VOIDmode
)
1622 gcc_assert (GET_MODE (exp
) == innermode
);
1623 return gen_highpart (outermode
, exp
);
1625 return simplify_gen_subreg (outermode
, exp
, innermode
,
1626 subreg_highpart_offset (outermode
, innermode
));
1629 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1630 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1633 subreg_size_lowpart_offset (poly_uint64 outer_bytes
, poly_uint64 inner_bytes
)
1635 gcc_checking_assert (ordered_p (outer_bytes
, inner_bytes
));
1636 if (maybe_gt (outer_bytes
, inner_bytes
))
1637 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1640 if (BYTES_BIG_ENDIAN
&& WORDS_BIG_ENDIAN
)
1641 return inner_bytes
- outer_bytes
;
1642 else if (!BYTES_BIG_ENDIAN
&& !WORDS_BIG_ENDIAN
)
1645 return subreg_size_offset_from_lsb (outer_bytes
, inner_bytes
, 0);
1648 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1649 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1652 subreg_size_highpart_offset (poly_uint64 outer_bytes
, poly_uint64 inner_bytes
)
1654 gcc_assert (known_ge (inner_bytes
, outer_bytes
));
1656 if (BYTES_BIG_ENDIAN
&& WORDS_BIG_ENDIAN
)
1658 else if (!BYTES_BIG_ENDIAN
&& !WORDS_BIG_ENDIAN
)
1659 return inner_bytes
- outer_bytes
;
1661 return subreg_size_offset_from_lsb (outer_bytes
, inner_bytes
,
1662 (inner_bytes
- outer_bytes
)
1666 /* Return 1 iff X, assumed to be a SUBREG,
1667 refers to the least significant part of its containing reg.
1668 If X is not a SUBREG, always return 1 (it is its own low part!). */
1671 subreg_lowpart_p (const_rtx x
)
1673 if (GET_CODE (x
) != SUBREG
)
1675 else if (GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
1678 return known_eq (subreg_lowpart_offset (GET_MODE (x
),
1679 GET_MODE (SUBREG_REG (x
))),
1683 /* Return subword OFFSET of operand OP.
1684 The word number, OFFSET, is interpreted as the word number starting
1685 at the low-order address. OFFSET 0 is the low-order word if not
1686 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1688 If we cannot extract the required word, we return zero. Otherwise,
1689 an rtx corresponding to the requested word will be returned.
1691 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1692 reload has completed, a valid address will always be returned. After
1693 reload, if a valid address cannot be returned, we return zero.
1695 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1696 it is the responsibility of the caller.
1698 MODE is the mode of OP in case it is a CONST_INT.
1700 ??? This is still rather broken for some cases. The problem for the
1701 moment is that all callers of this thing provide no 'goal mode' to
1702 tell us to work with. This exists because all callers were written
1703 in a word based SUBREG world.
1704 Now use of this function can be deprecated by simplify_subreg in most
1709 operand_subword (rtx op
, poly_uint64 offset
, int validate_address
,
1712 if (mode
== VOIDmode
)
1713 mode
= GET_MODE (op
);
1715 gcc_assert (mode
!= VOIDmode
);
1717 /* If OP is narrower than a word, fail. */
1719 && maybe_lt (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
1722 /* If we want a word outside OP, return zero. */
1724 && maybe_gt ((offset
+ 1) * UNITS_PER_WORD
, GET_MODE_SIZE (mode
)))
1727 /* Form a new MEM at the requested address. */
1730 rtx new_rtx
= adjust_address_nv (op
, word_mode
, offset
* UNITS_PER_WORD
);
1732 if (! validate_address
)
1735 else if (reload_completed
)
1737 if (! strict_memory_address_addr_space_p (word_mode
,
1739 MEM_ADDR_SPACE (op
)))
1743 return replace_equiv_address (new_rtx
, XEXP (new_rtx
, 0));
1746 /* Rest can be handled by simplify_subreg. */
1747 return simplify_gen_subreg (word_mode
, op
, mode
, (offset
* UNITS_PER_WORD
));
1750 /* Similar to `operand_subword', but never return 0. If we can't
1751 extract the required subword, put OP into a register and try again.
1752 The second attempt must succeed. We always validate the address in
1755 MODE is the mode of OP, in case it is CONST_INT. */
1758 operand_subword_force (rtx op
, poly_uint64 offset
, machine_mode mode
)
1760 rtx result
= operand_subword (op
, offset
, 1, mode
);
1765 if (mode
!= BLKmode
&& mode
!= VOIDmode
)
1767 /* If this is a register which can not be accessed by words, copy it
1768 to a pseudo register. */
1770 op
= copy_to_reg (op
);
1772 op
= force_reg (mode
, op
);
1775 result
= operand_subword (op
, offset
, 1, mode
);
1776 gcc_assert (result
);
1781 mem_attrs::mem_attrs ()
1787 addrspace (ADDR_SPACE_GENERIC
),
1788 offset_known_p (false),
1789 size_known_p (false)
1792 /* Returns 1 if both MEM_EXPR can be considered equal
1796 mem_expr_equal_p (const_tree expr1
, const_tree expr2
)
1801 if (! expr1
|| ! expr2
)
1804 if (TREE_CODE (expr1
) != TREE_CODE (expr2
))
1807 return operand_equal_p (expr1
, expr2
, 0);
1810 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1811 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1815 get_mem_align_offset (rtx mem
, unsigned int align
)
1820 /* This function can't use
1821 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1822 || (MAX (MEM_ALIGN (mem),
1823 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1827 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1829 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1830 for <variable>. get_inner_reference doesn't handle it and
1831 even if it did, the alignment in that case needs to be determined
1832 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1833 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1834 isn't sufficiently aligned, the object it is in might be. */
1835 gcc_assert (MEM_P (mem
));
1836 expr
= MEM_EXPR (mem
);
1837 if (expr
== NULL_TREE
|| !MEM_OFFSET_KNOWN_P (mem
))
1840 offset
= MEM_OFFSET (mem
);
1843 if (DECL_ALIGN (expr
) < align
)
1846 else if (INDIRECT_REF_P (expr
))
1848 if (TYPE_ALIGN (TREE_TYPE (expr
)) < (unsigned int) align
)
1851 else if (TREE_CODE (expr
) == COMPONENT_REF
)
1855 tree inner
= TREE_OPERAND (expr
, 0);
1856 tree field
= TREE_OPERAND (expr
, 1);
1857 tree byte_offset
= component_ref_field_offset (expr
);
1858 tree bit_offset
= DECL_FIELD_BIT_OFFSET (field
);
1860 poly_uint64 suboffset
;
1862 || !poly_int_tree_p (byte_offset
, &suboffset
)
1863 || !tree_fits_uhwi_p (bit_offset
))
1866 offset
+= suboffset
;
1867 offset
+= tree_to_uhwi (bit_offset
) / BITS_PER_UNIT
;
1869 if (inner
== NULL_TREE
)
1871 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field
))
1872 < (unsigned int) align
)
1876 else if (DECL_P (inner
))
1878 if (DECL_ALIGN (inner
) < align
)
1882 else if (TREE_CODE (inner
) != COMPONENT_REF
)
1890 HOST_WIDE_INT misalign
;
1891 if (!known_misalignment (offset
, align
/ BITS_PER_UNIT
, &misalign
))
1896 /* Given REF (a MEM) and T, either the type of X or the expression
1897 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1898 if we are making a new object of this type. BITPOS is nonzero if
1899 there is an offset outstanding on T that will be applied later. */
1902 set_mem_attributes_minus_bitpos (rtx ref
, tree t
, int objectp
,
1905 poly_int64 apply_bitpos
= 0;
1907 struct mem_attrs attrs
, *defattrs
, *refattrs
;
1910 /* It can happen that type_for_mode was given a mode for which there
1911 is no language-level type. In which case it returns NULL, which
1916 type
= TYPE_P (t
) ? t
: TREE_TYPE (t
);
1917 if (type
== error_mark_node
)
1920 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1921 wrong answer, as it assumes that DECL_RTL already has the right alias
1922 info. Callers should not set DECL_RTL until after the call to
1923 set_mem_attributes. */
1924 gcc_assert (!DECL_P (t
) || ref
!= DECL_RTL_IF_SET (t
));
1926 /* Get the alias set from the expression or type (perhaps using a
1927 front-end routine) and use it. */
1928 attrs
.alias
= get_alias_set (t
);
1930 MEM_VOLATILE_P (ref
) |= TYPE_VOLATILE (type
);
1931 MEM_POINTER (ref
) = POINTER_TYPE_P (type
);
1933 /* Default values from pre-existing memory attributes if present. */
1934 refattrs
= MEM_ATTRS (ref
);
1937 /* ??? Can this ever happen? Calling this routine on a MEM that
1938 already carries memory attributes should probably be invalid. */
1939 attrs
.expr
= refattrs
->expr
;
1940 attrs
.offset_known_p
= refattrs
->offset_known_p
;
1941 attrs
.offset
= refattrs
->offset
;
1942 attrs
.size_known_p
= refattrs
->size_known_p
;
1943 attrs
.size
= refattrs
->size
;
1944 attrs
.align
= refattrs
->align
;
1947 /* Otherwise, default values from the mode of the MEM reference. */
1950 defattrs
= mode_mem_attrs
[(int) GET_MODE (ref
)];
1951 gcc_assert (!defattrs
->expr
);
1952 gcc_assert (!defattrs
->offset_known_p
);
1954 /* Respect mode size. */
1955 attrs
.size_known_p
= defattrs
->size_known_p
;
1956 attrs
.size
= defattrs
->size
;
1957 /* ??? Is this really necessary? We probably should always get
1958 the size from the type below. */
1960 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1961 if T is an object, always compute the object alignment below. */
1963 attrs
.align
= defattrs
->align
;
1965 attrs
.align
= BITS_PER_UNIT
;
1966 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1967 e.g. if the type carries an alignment attribute. Should we be
1968 able to simply always use TYPE_ALIGN? */
1971 /* We can set the alignment from the type if we are making an object or if
1972 this is an INDIRECT_REF. */
1973 if (objectp
|| TREE_CODE (t
) == INDIRECT_REF
)
1974 attrs
.align
= MAX (attrs
.align
, TYPE_ALIGN (type
));
1976 /* If the size is known, we can set that. */
1977 tree new_size
= TYPE_SIZE_UNIT (type
);
1979 /* The address-space is that of the type. */
1980 as
= TYPE_ADDR_SPACE (type
);
1982 /* If T is not a type, we may be able to deduce some more information about
1988 if (TREE_THIS_VOLATILE (t
))
1989 MEM_VOLATILE_P (ref
) = 1;
1991 /* Now remove any conversions: they don't change what the underlying
1992 object is. Likewise for SAVE_EXPR. */
1993 while (CONVERT_EXPR_P (t
)
1994 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
1995 || TREE_CODE (t
) == SAVE_EXPR
)
1996 t
= TREE_OPERAND (t
, 0);
1998 /* Note whether this expression can trap. */
1999 MEM_NOTRAP_P (ref
) = !tree_could_trap_p (t
);
2001 base
= get_base_address (t
);
2005 && TREE_READONLY (base
)
2006 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
))
2007 && !TREE_THIS_VOLATILE (base
))
2008 MEM_READONLY_P (ref
) = 1;
2010 /* Mark static const strings readonly as well. */
2011 if (TREE_CODE (base
) == STRING_CST
2012 && TREE_READONLY (base
)
2013 && TREE_STATIC (base
))
2014 MEM_READONLY_P (ref
) = 1;
2016 /* Address-space information is on the base object. */
2017 if (TREE_CODE (base
) == MEM_REF
2018 || TREE_CODE (base
) == TARGET_MEM_REF
)
2019 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base
,
2022 as
= TYPE_ADDR_SPACE (TREE_TYPE (base
));
2025 /* If this expression uses it's parent's alias set, mark it such
2026 that we won't change it. */
2027 if (component_uses_parent_alias_set_from (t
) != NULL_TREE
)
2028 MEM_KEEP_ALIAS_SET_P (ref
) = 1;
2030 /* If this is a decl, set the attributes of the MEM from it. */
2034 attrs
.offset_known_p
= true;
2036 apply_bitpos
= bitpos
;
2037 new_size
= DECL_SIZE_UNIT (t
);
2040 /* ??? If we end up with a constant here do record a MEM_EXPR. */
2041 else if (CONSTANT_CLASS_P (t
))
2044 /* If this is a field reference, record it. */
2045 else if (TREE_CODE (t
) == COMPONENT_REF
)
2048 attrs
.offset_known_p
= true;
2050 apply_bitpos
= bitpos
;
2051 if (DECL_BIT_FIELD (TREE_OPERAND (t
, 1)))
2052 new_size
= DECL_SIZE_UNIT (TREE_OPERAND (t
, 1));
2055 /* If this is an array reference, look for an outer field reference. */
2056 else if (TREE_CODE (t
) == ARRAY_REF
)
2058 tree off_tree
= size_zero_node
;
2059 /* We can't modify t, because we use it at the end of the
2065 tree index
= TREE_OPERAND (t2
, 1);
2066 tree low_bound
= array_ref_low_bound (t2
);
2067 tree unit_size
= array_ref_element_size (t2
);
2069 /* We assume all arrays have sizes that are a multiple of a byte.
2070 First subtract the lower bound, if any, in the type of the
2071 index, then convert to sizetype and multiply by the size of
2072 the array element. */
2073 if (! integer_zerop (low_bound
))
2074 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
2077 off_tree
= size_binop (PLUS_EXPR
,
2078 size_binop (MULT_EXPR
,
2079 fold_convert (sizetype
,
2083 t2
= TREE_OPERAND (t2
, 0);
2085 while (TREE_CODE (t2
) == ARRAY_REF
);
2088 || (TREE_CODE (t2
) == COMPONENT_REF
2089 /* For trailing arrays t2 doesn't have a size that
2090 covers all valid accesses. */
2091 && ! array_at_struct_end_p (t
)))
2094 attrs
.offset_known_p
= false;
2095 if (poly_int_tree_p (off_tree
, &attrs
.offset
))
2097 attrs
.offset_known_p
= true;
2098 apply_bitpos
= bitpos
;
2101 /* Else do not record a MEM_EXPR. */
2104 /* If this is an indirect reference, record it. */
2105 else if (TREE_CODE (t
) == MEM_REF
2106 || TREE_CODE (t
) == TARGET_MEM_REF
)
2109 attrs
.offset_known_p
= true;
2111 apply_bitpos
= bitpos
;
2114 /* Compute the alignment. */
2115 unsigned int obj_align
;
2116 unsigned HOST_WIDE_INT obj_bitpos
;
2117 get_object_alignment_1 (t
, &obj_align
, &obj_bitpos
);
2118 unsigned int diff_align
= known_alignment (obj_bitpos
- bitpos
);
2119 if (diff_align
!= 0)
2120 obj_align
= MIN (obj_align
, diff_align
);
2121 attrs
.align
= MAX (attrs
.align
, obj_align
);
2124 poly_uint64 const_size
;
2125 if (poly_int_tree_p (new_size
, &const_size
))
2127 attrs
.size_known_p
= true;
2128 attrs
.size
= const_size
;
2131 /* If we modified OFFSET based on T, then subtract the outstanding
2132 bit position offset. Similarly, increase the size of the accessed
2133 object to contain the negative offset. */
2134 if (maybe_ne (apply_bitpos
, 0))
2136 gcc_assert (attrs
.offset_known_p
);
2137 poly_int64 bytepos
= bits_to_bytes_round_down (apply_bitpos
);
2138 attrs
.offset
-= bytepos
;
2139 if (attrs
.size_known_p
)
2140 attrs
.size
+= bytepos
;
2143 /* Now set the attributes we computed above. */
2144 attrs
.addrspace
= as
;
2145 set_mem_attrs (ref
, &attrs
);
2149 set_mem_attributes (rtx ref
, tree t
, int objectp
)
2151 set_mem_attributes_minus_bitpos (ref
, t
, objectp
, 0);
2154 /* Set the alias set of MEM to SET. */
2157 set_mem_alias_set (rtx mem
, alias_set_type set
)
2159 /* If the new and old alias sets don't conflict, something is wrong. */
2160 gcc_checking_assert (alias_sets_conflict_p (set
, MEM_ALIAS_SET (mem
)));
2161 mem_attrs
attrs (*get_mem_attrs (mem
));
2163 set_mem_attrs (mem
, &attrs
);
2166 /* Set the address space of MEM to ADDRSPACE (target-defined). */
2169 set_mem_addr_space (rtx mem
, addr_space_t addrspace
)
2171 mem_attrs
attrs (*get_mem_attrs (mem
));
2172 attrs
.addrspace
= addrspace
;
2173 set_mem_attrs (mem
, &attrs
);
2176 /* Set the alignment of MEM to ALIGN bits. */
2179 set_mem_align (rtx mem
, unsigned int align
)
2181 mem_attrs
attrs (*get_mem_attrs (mem
));
2182 attrs
.align
= align
;
2183 set_mem_attrs (mem
, &attrs
);
2186 /* Set the expr for MEM to EXPR. */
2189 set_mem_expr (rtx mem
, tree expr
)
2191 mem_attrs
attrs (*get_mem_attrs (mem
));
2193 set_mem_attrs (mem
, &attrs
);
2196 /* Set the offset of MEM to OFFSET. */
2199 set_mem_offset (rtx mem
, poly_int64 offset
)
2201 mem_attrs
attrs (*get_mem_attrs (mem
));
2202 attrs
.offset_known_p
= true;
2203 attrs
.offset
= offset
;
2204 set_mem_attrs (mem
, &attrs
);
2207 /* Clear the offset of MEM. */
2210 clear_mem_offset (rtx mem
)
2212 mem_attrs
attrs (*get_mem_attrs (mem
));
2213 attrs
.offset_known_p
= false;
2214 set_mem_attrs (mem
, &attrs
);
2217 /* Set the size of MEM to SIZE. */
2220 set_mem_size (rtx mem
, poly_int64 size
)
2222 mem_attrs
attrs (*get_mem_attrs (mem
));
2223 attrs
.size_known_p
= true;
2225 set_mem_attrs (mem
, &attrs
);
2228 /* Clear the size of MEM. */
2231 clear_mem_size (rtx mem
)
2233 mem_attrs
attrs (*get_mem_attrs (mem
));
2234 attrs
.size_known_p
= false;
2235 set_mem_attrs (mem
, &attrs
);
2238 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2239 and its address changed to ADDR. (VOIDmode means don't change the mode.
2240 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2241 returned memory location is required to be valid. INPLACE is true if any
2242 changes can be made directly to MEMREF or false if MEMREF must be treated
2245 The memory attributes are not changed. */
2248 change_address_1 (rtx memref
, machine_mode mode
, rtx addr
, int validate
,
2254 gcc_assert (MEM_P (memref
));
2255 as
= MEM_ADDR_SPACE (memref
);
2256 if (mode
== VOIDmode
)
2257 mode
= GET_MODE (memref
);
2259 addr
= XEXP (memref
, 0);
2260 if (mode
== GET_MODE (memref
) && addr
== XEXP (memref
, 0)
2261 && (!validate
|| memory_address_addr_space_p (mode
, addr
, as
)))
2264 /* Don't validate address for LRA. LRA can make the address valid
2265 by itself in most efficient way. */
2266 if (validate
&& !lra_in_progress
)
2268 if (reload_in_progress
|| reload_completed
)
2269 gcc_assert (memory_address_addr_space_p (mode
, addr
, as
));
2271 addr
= memory_address_addr_space (mode
, addr
, as
);
2274 if (rtx_equal_p (addr
, XEXP (memref
, 0)) && mode
== GET_MODE (memref
))
2279 XEXP (memref
, 0) = addr
;
2283 new_rtx
= gen_rtx_MEM (mode
, addr
);
2284 MEM_COPY_ATTRIBUTES (new_rtx
, memref
);
2288 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2289 way we are changing MEMREF, so we only preserve the alias set. */
2292 change_address (rtx memref
, machine_mode mode
, rtx addr
)
2294 rtx new_rtx
= change_address_1 (memref
, mode
, addr
, 1, false);
2295 machine_mode mmode
= GET_MODE (new_rtx
);
2296 struct mem_attrs
*defattrs
;
2298 mem_attrs
attrs (*get_mem_attrs (memref
));
2299 defattrs
= mode_mem_attrs
[(int) mmode
];
2300 attrs
.expr
= NULL_TREE
;
2301 attrs
.offset_known_p
= false;
2302 attrs
.size_known_p
= defattrs
->size_known_p
;
2303 attrs
.size
= defattrs
->size
;
2304 attrs
.align
= defattrs
->align
;
2306 /* If there are no changes, just return the original memory reference. */
2307 if (new_rtx
== memref
)
2309 if (mem_attrs_eq_p (get_mem_attrs (memref
), &attrs
))
2312 new_rtx
= gen_rtx_MEM (mmode
, XEXP (memref
, 0));
2313 MEM_COPY_ATTRIBUTES (new_rtx
, memref
);
2316 set_mem_attrs (new_rtx
, &attrs
);
2320 /* Return a memory reference like MEMREF, but with its mode changed
2321 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2322 nonzero, the memory address is forced to be valid.
2323 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2324 and the caller is responsible for adjusting MEMREF base register.
2325 If ADJUST_OBJECT is zero, the underlying object associated with the
2326 memory reference is left unchanged and the caller is responsible for
2327 dealing with it. Otherwise, if the new memory reference is outside
2328 the underlying object, even partially, then the object is dropped.
2329 SIZE, if nonzero, is the size of an access in cases where MODE
2330 has no inherent size. */
2333 adjust_address_1 (rtx memref
, machine_mode mode
, poly_int64 offset
,
2334 int validate
, int adjust_address
, int adjust_object
,
2337 rtx addr
= XEXP (memref
, 0);
2339 scalar_int_mode address_mode
;
2340 struct mem_attrs
attrs (*get_mem_attrs (memref
)), *defattrs
;
2341 unsigned HOST_WIDE_INT max_align
;
2342 #ifdef POINTERS_EXTEND_UNSIGNED
2343 scalar_int_mode pointer_mode
2344 = targetm
.addr_space
.pointer_mode (attrs
.addrspace
);
2347 /* VOIDmode means no mode change for change_address_1. */
2348 if (mode
== VOIDmode
)
2349 mode
= GET_MODE (memref
);
2351 /* Take the size of non-BLKmode accesses from the mode. */
2352 defattrs
= mode_mem_attrs
[(int) mode
];
2353 if (defattrs
->size_known_p
)
2354 size
= defattrs
->size
;
2356 /* If there are no changes, just return the original memory reference. */
2357 if (mode
== GET_MODE (memref
)
2358 && known_eq (offset
, 0)
2359 && (known_eq (size
, 0)
2360 || (attrs
.size_known_p
&& known_eq (attrs
.size
, size
)))
2361 && (!validate
|| memory_address_addr_space_p (mode
, addr
,
2365 /* ??? Prefer to create garbage instead of creating shared rtl.
2366 This may happen even if offset is nonzero -- consider
2367 (plus (plus reg reg) const_int) -- so do this always. */
2368 addr
= copy_rtx (addr
);
2370 /* Convert a possibly large offset to a signed value within the
2371 range of the target address space. */
2372 address_mode
= get_address_mode (memref
);
2373 offset
= trunc_int_for_mode (offset
, address_mode
);
2377 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2378 object, we can merge it into the LO_SUM. */
2379 if (GET_MODE (memref
) != BLKmode
2380 && GET_CODE (addr
) == LO_SUM
2381 && known_in_range_p (offset
,
2382 0, (GET_MODE_ALIGNMENT (GET_MODE (memref
))
2384 addr
= gen_rtx_LO_SUM (address_mode
, XEXP (addr
, 0),
2385 plus_constant (address_mode
,
2386 XEXP (addr
, 1), offset
));
2387 #ifdef POINTERS_EXTEND_UNSIGNED
2388 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2389 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2390 the fact that pointers are not allowed to overflow. */
2391 else if (POINTERS_EXTEND_UNSIGNED
> 0
2392 && GET_CODE (addr
) == ZERO_EXTEND
2393 && GET_MODE (XEXP (addr
, 0)) == pointer_mode
2394 && known_eq (trunc_int_for_mode (offset
, pointer_mode
), offset
))
2395 addr
= gen_rtx_ZERO_EXTEND (address_mode
,
2396 plus_constant (pointer_mode
,
2397 XEXP (addr
, 0), offset
));
2400 addr
= plus_constant (address_mode
, addr
, offset
);
2403 new_rtx
= change_address_1 (memref
, mode
, addr
, validate
, false);
2405 /* If the address is a REG, change_address_1 rightfully returns memref,
2406 but this would destroy memref's MEM_ATTRS. */
2407 if (new_rtx
== memref
&& maybe_ne (offset
, 0))
2408 new_rtx
= copy_rtx (new_rtx
);
2410 /* Conservatively drop the object if we don't know where we start from. */
2411 if (adjust_object
&& (!attrs
.offset_known_p
|| !attrs
.size_known_p
))
2413 attrs
.expr
= NULL_TREE
;
2417 /* Compute the new values of the memory attributes due to this adjustment.
2418 We add the offsets and update the alignment. */
2419 if (attrs
.offset_known_p
)
2421 attrs
.offset
+= offset
;
2423 /* Drop the object if the new left end is not within its bounds. */
2424 if (adjust_object
&& maybe_lt (attrs
.offset
, 0))
2426 attrs
.expr
= NULL_TREE
;
2431 /* Compute the new alignment by taking the MIN of the alignment and the
2432 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2434 if (maybe_ne (offset
, 0))
2436 max_align
= known_alignment (offset
) * BITS_PER_UNIT
;
2437 attrs
.align
= MIN (attrs
.align
, max_align
);
2440 if (maybe_ne (size
, 0))
2442 /* Drop the object if the new right end is not within its bounds. */
2443 if (adjust_object
&& maybe_gt (offset
+ size
, attrs
.size
))
2445 attrs
.expr
= NULL_TREE
;
2448 attrs
.size_known_p
= true;
2451 else if (attrs
.size_known_p
)
2453 gcc_assert (!adjust_object
);
2454 attrs
.size
-= offset
;
2455 /* ??? The store_by_pieces machinery generates negative sizes,
2456 so don't assert for that here. */
2459 set_mem_attrs (new_rtx
, &attrs
);
2464 /* Return a memory reference like MEMREF, but with its mode changed
2465 to MODE and its address changed to ADDR, which is assumed to be
2466 MEMREF offset by OFFSET bytes. If VALIDATE is
2467 nonzero, the memory address is forced to be valid. */
2470 adjust_automodify_address_1 (rtx memref
, machine_mode mode
, rtx addr
,
2471 poly_int64 offset
, int validate
)
2473 memref
= change_address_1 (memref
, VOIDmode
, addr
, validate
, false);
2474 return adjust_address_1 (memref
, mode
, offset
, validate
, 0, 0, 0);
2477 /* Return a memory reference like MEMREF, but whose address is changed by
2478 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2479 known to be in OFFSET (possibly 1). */
2482 offset_address (rtx memref
, rtx offset
, unsigned HOST_WIDE_INT pow2
)
2484 rtx new_rtx
, addr
= XEXP (memref
, 0);
2485 machine_mode address_mode
;
2486 struct mem_attrs
*defattrs
;
2488 mem_attrs
attrs (*get_mem_attrs (memref
));
2489 address_mode
= get_address_mode (memref
);
2490 new_rtx
= simplify_gen_binary (PLUS
, address_mode
, addr
, offset
);
2492 /* At this point we don't know _why_ the address is invalid. It
2493 could have secondary memory references, multiplies or anything.
2495 However, if we did go and rearrange things, we can wind up not
2496 being able to recognize the magic around pic_offset_table_rtx.
2497 This stuff is fragile, and is yet another example of why it is
2498 bad to expose PIC machinery too early. */
2499 if (! memory_address_addr_space_p (GET_MODE (memref
), new_rtx
,
2501 && GET_CODE (addr
) == PLUS
2502 && XEXP (addr
, 0) == pic_offset_table_rtx
)
2504 addr
= force_reg (GET_MODE (addr
), addr
);
2505 new_rtx
= simplify_gen_binary (PLUS
, address_mode
, addr
, offset
);
2508 update_temp_slot_address (XEXP (memref
, 0), new_rtx
);
2509 new_rtx
= change_address_1 (memref
, VOIDmode
, new_rtx
, 1, false);
2511 /* If there are no changes, just return the original memory reference. */
2512 if (new_rtx
== memref
)
2515 /* Update the alignment to reflect the offset. Reset the offset, which
2517 defattrs
= mode_mem_attrs
[(int) GET_MODE (new_rtx
)];
2518 attrs
.offset_known_p
= false;
2519 attrs
.size_known_p
= defattrs
->size_known_p
;
2520 attrs
.size
= defattrs
->size
;
2521 attrs
.align
= MIN (attrs
.align
, pow2
* BITS_PER_UNIT
);
2522 set_mem_attrs (new_rtx
, &attrs
);
2526 /* Return a memory reference like MEMREF, but with its address changed to
2527 ADDR. The caller is asserting that the actual piece of memory pointed
2528 to is the same, just the form of the address is being changed, such as
2529 by putting something into a register. INPLACE is true if any changes
2530 can be made directly to MEMREF or false if MEMREF must be treated as
2534 replace_equiv_address (rtx memref
, rtx addr
, bool inplace
)
2536 /* change_address_1 copies the memory attribute structure without change
2537 and that's exactly what we want here. */
2538 update_temp_slot_address (XEXP (memref
, 0), addr
);
2539 return change_address_1 (memref
, VOIDmode
, addr
, 1, inplace
);
2542 /* Likewise, but the reference is not required to be valid. */
2545 replace_equiv_address_nv (rtx memref
, rtx addr
, bool inplace
)
2547 return change_address_1 (memref
, VOIDmode
, addr
, 0, inplace
);
2550 /* Return a memory reference like MEMREF, but with its mode widened to
2551 MODE and offset by OFFSET. This would be used by targets that e.g.
2552 cannot issue QImode memory operations and have to use SImode memory
2553 operations plus masking logic. */
2556 widen_memory_access (rtx memref
, machine_mode mode
, poly_int64 offset
)
2558 rtx new_rtx
= adjust_address_1 (memref
, mode
, offset
, 1, 1, 0, 0);
2559 unsigned int size
= GET_MODE_SIZE (mode
);
2561 /* If there are no changes, just return the original memory reference. */
2562 if (new_rtx
== memref
)
2565 mem_attrs
attrs (*get_mem_attrs (new_rtx
));
2567 /* If we don't know what offset we were at within the expression, then
2568 we can't know if we've overstepped the bounds. */
2569 if (! attrs
.offset_known_p
)
2570 attrs
.expr
= NULL_TREE
;
2574 if (TREE_CODE (attrs
.expr
) == COMPONENT_REF
)
2576 tree field
= TREE_OPERAND (attrs
.expr
, 1);
2577 tree offset
= component_ref_field_offset (attrs
.expr
);
2579 if (! DECL_SIZE_UNIT (field
))
2581 attrs
.expr
= NULL_TREE
;
2585 /* Is the field at least as large as the access? If so, ok,
2586 otherwise strip back to the containing structure. */
2587 if (poly_int_tree_p (DECL_SIZE_UNIT (field
))
2588 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field
)), size
)
2589 && known_ge (attrs
.offset
, 0))
2592 poly_uint64 suboffset
;
2593 if (!poly_int_tree_p (offset
, &suboffset
))
2595 attrs
.expr
= NULL_TREE
;
2599 attrs
.expr
= TREE_OPERAND (attrs
.expr
, 0);
2600 attrs
.offset
+= suboffset
;
2601 attrs
.offset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
2604 /* Similarly for the decl. */
2605 else if (DECL_P (attrs
.expr
)
2606 && DECL_SIZE_UNIT (attrs
.expr
)
2607 && poly_int_tree_p (DECL_SIZE_UNIT (attrs
.expr
))
2608 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs
.expr
)),
2610 && known_ge (attrs
.offset
, 0))
2614 /* The widened memory access overflows the expression, which means
2615 that it could alias another expression. Zap it. */
2616 attrs
.expr
= NULL_TREE
;
2622 attrs
.offset_known_p
= false;
2624 /* The widened memory may alias other stuff, so zap the alias set. */
2625 /* ??? Maybe use get_alias_set on any remaining expression. */
2627 attrs
.size_known_p
= true;
2629 set_mem_attrs (new_rtx
, &attrs
);
2633 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2634 static GTY(()) tree spill_slot_decl
;
2637 get_spill_slot_decl (bool force_build_p
)
2639 tree d
= spill_slot_decl
;
2642 if (d
|| !force_build_p
)
2645 d
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
2646 VAR_DECL
, get_identifier ("%sfp"), void_type_node
);
2647 DECL_ARTIFICIAL (d
) = 1;
2648 DECL_IGNORED_P (d
) = 1;
2650 spill_slot_decl
= d
;
2652 rd
= gen_rtx_MEM (BLKmode
, frame_pointer_rtx
);
2653 MEM_NOTRAP_P (rd
) = 1;
2654 mem_attrs
attrs (*mode_mem_attrs
[(int) BLKmode
]);
2655 attrs
.alias
= new_alias_set ();
2657 set_mem_attrs (rd
, &attrs
);
2658 SET_DECL_RTL (d
, rd
);
2663 /* Given MEM, a result from assign_stack_local, fill in the memory
2664 attributes as appropriate for a register allocator spill slot.
2665 These slots are not aliasable by other memory. We arrange for
2666 them all to use a single MEM_EXPR, so that the aliasing code can
2667 work properly in the case of shared spill slots. */
2670 set_mem_attrs_for_spill (rtx mem
)
2674 mem_attrs
attrs (*get_mem_attrs (mem
));
2675 attrs
.expr
= get_spill_slot_decl (true);
2676 attrs
.alias
= MEM_ALIAS_SET (DECL_RTL (attrs
.expr
));
2677 attrs
.addrspace
= ADDR_SPACE_GENERIC
;
2679 /* We expect the incoming memory to be of the form:
2680 (mem:MODE (plus (reg sfp) (const_int offset)))
2681 with perhaps the plus missing for offset = 0. */
2682 addr
= XEXP (mem
, 0);
2683 attrs
.offset_known_p
= true;
2684 strip_offset (addr
, &attrs
.offset
);
2686 set_mem_attrs (mem
, &attrs
);
2687 MEM_NOTRAP_P (mem
) = 1;
2690 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2693 gen_label_rtx (void)
2695 return as_a
<rtx_code_label
*> (
2696 gen_rtx_CODE_LABEL (VOIDmode
, NULL_RTX
, NULL_RTX
,
2697 NULL
, label_num
++, NULL
));
2700 /* For procedure integration. */
2702 /* Install new pointers to the first and last insns in the chain.
2703 Also, set cur_insn_uid to one higher than the last in use.
2704 Used for an inline-procedure after copying the insn chain. */
2707 set_new_first_and_last_insn (rtx_insn
*first
, rtx_insn
*last
)
2711 set_first_insn (first
);
2712 set_last_insn (last
);
2715 if (MIN_NONDEBUG_INSN_UID
|| MAY_HAVE_DEBUG_INSNS
)
2717 int debug_count
= 0;
2719 cur_insn_uid
= MIN_NONDEBUG_INSN_UID
- 1;
2720 cur_debug_insn_uid
= 0;
2722 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2723 if (INSN_UID (insn
) < MIN_NONDEBUG_INSN_UID
)
2724 cur_debug_insn_uid
= MAX (cur_debug_insn_uid
, INSN_UID (insn
));
2727 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
2728 if (DEBUG_INSN_P (insn
))
2733 cur_debug_insn_uid
= MIN_NONDEBUG_INSN_UID
+ debug_count
;
2735 cur_debug_insn_uid
++;
2738 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2739 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
2744 /* Go through all the RTL insn bodies and copy any invalid shared
2745 structure. This routine should only be called once. */
2748 unshare_all_rtl_1 (rtx_insn
*insn
)
2750 /* Unshare just about everything else. */
2751 unshare_all_rtl_in_chain (insn
);
2753 /* Make sure the addresses of stack slots found outside the insn chain
2754 (such as, in DECL_RTL of a variable) are not shared
2755 with the insn chain.
2757 This special care is necessary when the stack slot MEM does not
2758 actually appear in the insn chain. If it does appear, its address
2759 is unshared from all else at that point. */
2762 FOR_EACH_VEC_SAFE_ELT (stack_slot_list
, i
, temp
)
2763 (*stack_slot_list
)[i
] = copy_rtx_if_shared (temp
);
2766 /* Go through all the RTL insn bodies and copy any invalid shared
2767 structure, again. This is a fairly expensive thing to do so it
2768 should be done sparingly. */
2771 unshare_all_rtl_again (rtx_insn
*insn
)
2776 for (p
= insn
; p
; p
= NEXT_INSN (p
))
2779 reset_used_flags (PATTERN (p
));
2780 reset_used_flags (REG_NOTES (p
));
2782 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p
));
2785 /* Make sure that virtual stack slots are not shared. */
2786 set_used_decls (DECL_INITIAL (cfun
->decl
));
2788 /* Make sure that virtual parameters are not shared. */
2789 for (decl
= DECL_ARGUMENTS (cfun
->decl
); decl
; decl
= DECL_CHAIN (decl
))
2790 set_used_flags (DECL_RTL (decl
));
2794 FOR_EACH_VEC_SAFE_ELT (stack_slot_list
, i
, temp
)
2795 reset_used_flags (temp
);
2797 unshare_all_rtl_1 (insn
);
2801 unshare_all_rtl (void)
2803 unshare_all_rtl_1 (get_insns ());
2805 for (tree decl
= DECL_ARGUMENTS (cfun
->decl
); decl
; decl
= DECL_CHAIN (decl
))
2807 if (DECL_RTL_SET_P (decl
))
2808 SET_DECL_RTL (decl
, copy_rtx_if_shared (DECL_RTL (decl
)));
2809 DECL_INCOMING_RTL (decl
) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl
));
2816 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2817 Recursively does the same for subexpressions. */
2820 verify_rtx_sharing (rtx orig
, rtx insn
)
2825 const char *format_ptr
;
2830 code
= GET_CODE (x
);
2832 /* These types may be freely shared. */
2848 /* SCRATCH must be shared because they represent distinct values. */
2851 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2852 clobbers or clobbers of hard registers that originated as pseudos.
2853 This is needed to allow safe register renaming. */
2854 if (REG_P (XEXP (x
, 0))
2855 && HARD_REGISTER_NUM_P (REGNO (XEXP (x
, 0)))
2856 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x
, 0))))
2861 if (shared_const_p (orig
))
2866 /* A MEM is allowed to be shared if its address is constant. */
2867 if (CONSTANT_ADDRESS_P (XEXP (x
, 0))
2868 || reload_completed
|| reload_in_progress
)
2877 /* This rtx may not be shared. If it has already been seen,
2878 replace it with a copy of itself. */
2879 if (flag_checking
&& RTX_FLAG (x
, used
))
2881 error ("invalid rtl sharing found in the insn");
2883 error ("shared rtx");
2885 internal_error ("internal consistency failure");
2887 gcc_assert (!RTX_FLAG (x
, used
));
2889 RTX_FLAG (x
, used
) = 1;
2891 /* Now scan the subexpressions recursively. */
2893 format_ptr
= GET_RTX_FORMAT (code
);
2895 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2897 switch (*format_ptr
++)
2900 verify_rtx_sharing (XEXP (x
, i
), insn
);
2904 if (XVEC (x
, i
) != NULL
)
2907 int len
= XVECLEN (x
, i
);
2909 for (j
= 0; j
< len
; j
++)
2911 /* We allow sharing of ASM_OPERANDS inside single
2913 if (j
&& GET_CODE (XVECEXP (x
, i
, j
)) == SET
2914 && (GET_CODE (SET_SRC (XVECEXP (x
, i
, j
)))
2916 verify_rtx_sharing (SET_DEST (XVECEXP (x
, i
, j
)), insn
);
2918 verify_rtx_sharing (XVECEXP (x
, i
, j
), insn
);
2927 /* Reset used-flags for INSN. */
2930 reset_insn_used_flags (rtx insn
)
2932 gcc_assert (INSN_P (insn
));
2933 reset_used_flags (PATTERN (insn
));
2934 reset_used_flags (REG_NOTES (insn
));
2936 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn
));
2939 /* Go through all the RTL insn bodies and clear all the USED bits. */
2942 reset_all_used_flags (void)
2946 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
2949 rtx pat
= PATTERN (p
);
2950 if (GET_CODE (pat
) != SEQUENCE
)
2951 reset_insn_used_flags (p
);
2954 gcc_assert (REG_NOTES (p
) == NULL
);
2955 for (int i
= 0; i
< XVECLEN (pat
, 0); i
++)
2957 rtx insn
= XVECEXP (pat
, 0, i
);
2959 reset_insn_used_flags (insn
);
2965 /* Verify sharing in INSN. */
2968 verify_insn_sharing (rtx insn
)
2970 gcc_assert (INSN_P (insn
));
2971 verify_rtx_sharing (PATTERN (insn
), insn
);
2972 verify_rtx_sharing (REG_NOTES (insn
), insn
);
2974 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn
), insn
);
2977 /* Go through all the RTL insn bodies and check that there is no unexpected
2978 sharing in between the subexpressions. */
2981 verify_rtl_sharing (void)
2985 timevar_push (TV_VERIFY_RTL_SHARING
);
2987 reset_all_used_flags ();
2989 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
2992 rtx pat
= PATTERN (p
);
2993 if (GET_CODE (pat
) != SEQUENCE
)
2994 verify_insn_sharing (p
);
2996 for (int i
= 0; i
< XVECLEN (pat
, 0); i
++)
2998 rtx insn
= XVECEXP (pat
, 0, i
);
3000 verify_insn_sharing (insn
);
3004 reset_all_used_flags ();
3006 timevar_pop (TV_VERIFY_RTL_SHARING
);
3009 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3010 Assumes the mark bits are cleared at entry. */
3013 unshare_all_rtl_in_chain (rtx_insn
*insn
)
3015 for (; insn
; insn
= NEXT_INSN (insn
))
3018 PATTERN (insn
) = copy_rtx_if_shared (PATTERN (insn
));
3019 REG_NOTES (insn
) = copy_rtx_if_shared (REG_NOTES (insn
));
3021 CALL_INSN_FUNCTION_USAGE (insn
)
3022 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn
));
3026 /* Go through all virtual stack slots of a function and mark them as
3027 shared. We never replace the DECL_RTLs themselves with a copy,
3028 but expressions mentioned into a DECL_RTL cannot be shared with
3029 expressions in the instruction stream.
3031 Note that reload may convert pseudo registers into memories in-place.
3032 Pseudo registers are always shared, but MEMs never are. Thus if we
3033 reset the used flags on MEMs in the instruction stream, we must set
3034 them again on MEMs that appear in DECL_RTLs. */
3037 set_used_decls (tree blk
)
3042 for (t
= BLOCK_VARS (blk
); t
; t
= DECL_CHAIN (t
))
3043 if (DECL_RTL_SET_P (t
))
3044 set_used_flags (DECL_RTL (t
));
3046 /* Now process sub-blocks. */
3047 for (t
= BLOCK_SUBBLOCKS (blk
); t
; t
= BLOCK_CHAIN (t
))
3051 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3052 Recursively does the same for subexpressions. Uses
3053 copy_rtx_if_shared_1 to reduce stack space. */
3056 copy_rtx_if_shared (rtx orig
)
3058 copy_rtx_if_shared_1 (&orig
);
3062 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3063 use. Recursively does the same for subexpressions. */
3066 copy_rtx_if_shared_1 (rtx
*orig1
)
3072 const char *format_ptr
;
3076 /* Repeat is used to turn tail-recursion into iteration. */
3083 code
= GET_CODE (x
);
3085 /* These types may be freely shared. */
3101 /* SCRATCH must be shared because they represent distinct values. */
3104 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3105 clobbers or clobbers of hard registers that originated as pseudos.
3106 This is needed to allow safe register renaming. */
3107 if (REG_P (XEXP (x
, 0))
3108 && HARD_REGISTER_NUM_P (REGNO (XEXP (x
, 0)))
3109 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x
, 0))))
3114 if (shared_const_p (x
))
3124 /* The chain of insns is not being copied. */
3131 /* This rtx may not be shared. If it has already been seen,
3132 replace it with a copy of itself. */
3134 if (RTX_FLAG (x
, used
))
3136 x
= shallow_copy_rtx (x
);
3139 RTX_FLAG (x
, used
) = 1;
3141 /* Now scan the subexpressions recursively.
3142 We can store any replaced subexpressions directly into X
3143 since we know X is not shared! Any vectors in X
3144 must be copied if X was copied. */
3146 format_ptr
= GET_RTX_FORMAT (code
);
3147 length
= GET_RTX_LENGTH (code
);
3150 for (i
= 0; i
< length
; i
++)
3152 switch (*format_ptr
++)
3156 copy_rtx_if_shared_1 (last_ptr
);
3157 last_ptr
= &XEXP (x
, i
);
3161 if (XVEC (x
, i
) != NULL
)
3164 int len
= XVECLEN (x
, i
);
3166 /* Copy the vector iff I copied the rtx and the length
3168 if (copied
&& len
> 0)
3169 XVEC (x
, i
) = gen_rtvec_v (len
, XVEC (x
, i
)->elem
);
3171 /* Call recursively on all inside the vector. */
3172 for (j
= 0; j
< len
; j
++)
3175 copy_rtx_if_shared_1 (last_ptr
);
3176 last_ptr
= &XVECEXP (x
, i
, j
);
3191 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3194 mark_used_flags (rtx x
, int flag
)
3198 const char *format_ptr
;
3201 /* Repeat is used to turn tail-recursion into iteration. */
3206 code
= GET_CODE (x
);
3208 /* These types may be freely shared so we needn't do any resetting
3232 /* The chain of insns is not being copied. */
3239 RTX_FLAG (x
, used
) = flag
;
3241 format_ptr
= GET_RTX_FORMAT (code
);
3242 length
= GET_RTX_LENGTH (code
);
3244 for (i
= 0; i
< length
; i
++)
3246 switch (*format_ptr
++)
3254 mark_used_flags (XEXP (x
, i
), flag
);
3258 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3259 mark_used_flags (XVECEXP (x
, i
, j
), flag
);
3265 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3266 to look for shared sub-parts. */
3269 reset_used_flags (rtx x
)
3271 mark_used_flags (x
, 0);
3274 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3275 to look for shared sub-parts. */
3278 set_used_flags (rtx x
)
3280 mark_used_flags (x
, 1);
3283 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3284 Return X or the rtx for the pseudo reg the value of X was copied into.
3285 OTHER must be valid as a SET_DEST. */
3288 make_safe_from (rtx x
, rtx other
)
3291 switch (GET_CODE (other
))
3294 other
= SUBREG_REG (other
);
3296 case STRICT_LOW_PART
:
3299 other
= XEXP (other
, 0);
3308 && GET_CODE (x
) != SUBREG
)
3310 && (REGNO (other
) < FIRST_PSEUDO_REGISTER
3311 || reg_mentioned_p (other
, x
))))
3313 rtx temp
= gen_reg_rtx (GET_MODE (x
));
3314 emit_move_insn (temp
, x
);
3320 /* Emission of insns (adding them to the doubly-linked list). */
3322 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3325 get_last_insn_anywhere (void)
3327 struct sequence_stack
*seq
;
3328 for (seq
= get_current_sequence (); seq
; seq
= seq
->next
)
3334 /* Return the first nonnote insn emitted in current sequence or current
3335 function. This routine looks inside SEQUENCEs. */
3338 get_first_nonnote_insn (void)
3340 rtx_insn
*insn
= get_insns ();
3345 for (insn
= next_insn (insn
);
3346 insn
&& NOTE_P (insn
);
3347 insn
= next_insn (insn
))
3351 if (NONJUMP_INSN_P (insn
)
3352 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3353 insn
= as_a
<rtx_sequence
*> (PATTERN (insn
))->insn (0);
3360 /* Return the last nonnote insn emitted in current sequence or current
3361 function. This routine looks inside SEQUENCEs. */
3364 get_last_nonnote_insn (void)
3366 rtx_insn
*insn
= get_last_insn ();
3371 for (insn
= previous_insn (insn
);
3372 insn
&& NOTE_P (insn
);
3373 insn
= previous_insn (insn
))
3377 if (NONJUMP_INSN_P (insn
))
3378 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (PATTERN (insn
)))
3379 insn
= seq
->insn (seq
->len () - 1);
3386 /* Return the number of actual (non-debug) insns emitted in this
3390 get_max_insn_count (void)
3392 int n
= cur_insn_uid
;
3394 /* The table size must be stable across -g, to avoid codegen
3395 differences due to debug insns, and not be affected by
3396 -fmin-insn-uid, to avoid excessive table size and to simplify
3397 debugging of -fcompare-debug failures. */
3398 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
3399 n
-= cur_debug_insn_uid
;
3401 n
-= MIN_NONDEBUG_INSN_UID
;
3407 /* Return the next insn. If it is a SEQUENCE, return the first insn
3411 next_insn (rtx_insn
*insn
)
3415 insn
= NEXT_INSN (insn
);
3416 if (insn
&& NONJUMP_INSN_P (insn
)
3417 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3418 insn
= as_a
<rtx_sequence
*> (PATTERN (insn
))->insn (0);
3424 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3428 previous_insn (rtx_insn
*insn
)
3432 insn
= PREV_INSN (insn
);
3433 if (insn
&& NONJUMP_INSN_P (insn
))
3434 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (PATTERN (insn
)))
3435 insn
= seq
->insn (seq
->len () - 1);
3441 /* Return the next insn after INSN that is not a NOTE. This routine does not
3442 look inside SEQUENCEs. */
3445 next_nonnote_insn (rtx_insn
*insn
)
3449 insn
= NEXT_INSN (insn
);
3450 if (insn
== 0 || !NOTE_P (insn
))
3457 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3458 routine does not look inside SEQUENCEs. */
3461 next_nondebug_insn (rtx_insn
*insn
)
3465 insn
= NEXT_INSN (insn
);
3466 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3473 /* Return the previous insn before INSN that is not a NOTE. This routine does
3474 not look inside SEQUENCEs. */
3477 prev_nonnote_insn (rtx_insn
*insn
)
3481 insn
= PREV_INSN (insn
);
3482 if (insn
== 0 || !NOTE_P (insn
))
3489 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3490 This routine does not look inside SEQUENCEs. */
3493 prev_nondebug_insn (rtx_insn
*insn
)
3497 insn
= PREV_INSN (insn
);
3498 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3505 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3506 This routine does not look inside SEQUENCEs. */
3509 next_nonnote_nondebug_insn (rtx_insn
*insn
)
3513 insn
= NEXT_INSN (insn
);
3514 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3521 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3522 but stop the search before we enter another basic block. This
3523 routine does not look inside SEQUENCEs. */
3526 next_nonnote_nondebug_insn_bb (rtx_insn
*insn
)
3530 insn
= NEXT_INSN (insn
);
3533 if (DEBUG_INSN_P (insn
))
3537 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3544 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3545 This routine does not look inside SEQUENCEs. */
3548 prev_nonnote_nondebug_insn (rtx_insn
*insn
)
3552 insn
= PREV_INSN (insn
);
3553 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3560 /* Return the previous insn before INSN that is not a NOTE nor
3561 DEBUG_INSN, but stop the search before we enter another basic
3562 block. This routine does not look inside SEQUENCEs. */
3565 prev_nonnote_nondebug_insn_bb (rtx_insn
*insn
)
3569 insn
= PREV_INSN (insn
);
3572 if (DEBUG_INSN_P (insn
))
3576 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3583 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3584 or 0, if there is none. This routine does not look inside
3588 next_real_insn (rtx uncast_insn
)
3590 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3594 insn
= NEXT_INSN (insn
);
3595 if (insn
== 0 || INSN_P (insn
))
3602 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3603 or 0, if there is none. This routine does not look inside
3607 prev_real_insn (rtx_insn
*insn
)
3611 insn
= PREV_INSN (insn
);
3612 if (insn
== 0 || INSN_P (insn
))
3619 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3620 This routine does not look inside SEQUENCEs. */
3623 last_call_insn (void)
3627 for (insn
= get_last_insn ();
3628 insn
&& !CALL_P (insn
);
3629 insn
= PREV_INSN (insn
))
3632 return safe_as_a
<rtx_call_insn
*> (insn
);
3635 /* Find the next insn after INSN that really does something. This routine
3636 does not look inside SEQUENCEs. After reload this also skips over
3637 standalone USE and CLOBBER insn. */
3640 active_insn_p (const rtx_insn
*insn
)
3642 return (CALL_P (insn
) || JUMP_P (insn
)
3643 || JUMP_TABLE_DATA_P (insn
) /* FIXME */
3644 || (NONJUMP_INSN_P (insn
)
3645 && (! reload_completed
3646 || (GET_CODE (PATTERN (insn
)) != USE
3647 && GET_CODE (PATTERN (insn
)) != CLOBBER
))));
3651 next_active_insn (rtx_insn
*insn
)
3655 insn
= NEXT_INSN (insn
);
3656 if (insn
== 0 || active_insn_p (insn
))
3663 /* Find the last insn before INSN that really does something. This routine
3664 does not look inside SEQUENCEs. After reload this also skips over
3665 standalone USE and CLOBBER insn. */
3668 prev_active_insn (rtx_insn
*insn
)
3672 insn
= PREV_INSN (insn
);
3673 if (insn
== 0 || active_insn_p (insn
))
3680 /* Return the next insn that uses CC0 after INSN, which is assumed to
3681 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3682 applied to the result of this function should yield INSN).
3684 Normally, this is simply the next insn. However, if a REG_CC_USER note
3685 is present, it contains the insn that uses CC0.
3687 Return 0 if we can't find the insn. */
3690 next_cc0_user (rtx_insn
*insn
)
3692 rtx note
= find_reg_note (insn
, REG_CC_USER
, NULL_RTX
);
3695 return safe_as_a
<rtx_insn
*> (XEXP (note
, 0));
3697 insn
= next_nonnote_insn (insn
);
3698 if (insn
&& NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3699 insn
= as_a
<rtx_sequence
*> (PATTERN (insn
))->insn (0);
3701 if (insn
&& INSN_P (insn
) && reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
3707 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3708 note, it is the previous insn. */
3711 prev_cc0_setter (rtx_insn
*insn
)
3713 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
3716 return safe_as_a
<rtx_insn
*> (XEXP (note
, 0));
3718 insn
= prev_nonnote_insn (insn
);
3719 gcc_assert (sets_cc0_p (PATTERN (insn
)));
3724 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3727 find_auto_inc (const_rtx x
, const_rtx reg
)
3729 subrtx_iterator::array_type array
;
3730 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
3732 const_rtx x
= *iter
;
3733 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
3734 && rtx_equal_p (reg
, XEXP (x
, 0)))
3740 /* Increment the label uses for all labels present in rtx. */
3743 mark_label_nuses (rtx x
)
3749 code
= GET_CODE (x
);
3750 if (code
== LABEL_REF
&& LABEL_P (label_ref_label (x
)))
3751 LABEL_NUSES (label_ref_label (x
))++;
3753 fmt
= GET_RTX_FORMAT (code
);
3754 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3757 mark_label_nuses (XEXP (x
, i
));
3758 else if (fmt
[i
] == 'E')
3759 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3760 mark_label_nuses (XVECEXP (x
, i
, j
));
3765 /* Try splitting insns that can be split for better scheduling.
3766 PAT is the pattern which might split.
3767 TRIAL is the insn providing PAT.
3768 LAST is nonzero if we should return the last insn of the sequence produced.
3770 If this routine succeeds in splitting, it returns the first or last
3771 replacement insn depending on the value of LAST. Otherwise, it
3772 returns TRIAL. If the insn to be returned can be split, it will be. */
3775 try_split (rtx pat
, rtx_insn
*trial
, int last
)
3777 rtx_insn
*before
, *after
;
3779 rtx_insn
*seq
, *tem
;
3780 profile_probability probability
;
3781 rtx_insn
*insn_last
, *insn
;
3783 rtx_insn
*call_insn
= NULL
;
3785 /* We're not good at redistributing frame information. */
3786 if (RTX_FRAME_RELATED_P (trial
))
3789 if (any_condjump_p (trial
)
3790 && (note
= find_reg_note (trial
, REG_BR_PROB
, 0)))
3791 split_branch_probability
3792 = profile_probability::from_reg_br_prob_note (XINT (note
, 0));
3794 split_branch_probability
= profile_probability::uninitialized ();
3796 probability
= split_branch_probability
;
3798 seq
= split_insns (pat
, trial
);
3800 split_branch_probability
= profile_probability::uninitialized ();
3805 /* Avoid infinite loop if any insn of the result matches
3806 the original pattern. */
3810 if (INSN_P (insn_last
)
3811 && rtx_equal_p (PATTERN (insn_last
), pat
))
3813 if (!NEXT_INSN (insn_last
))
3815 insn_last
= NEXT_INSN (insn_last
);
3818 /* We will be adding the new sequence to the function. The splitters
3819 may have introduced invalid RTL sharing, so unshare the sequence now. */
3820 unshare_all_rtl_in_chain (seq
);
3822 /* Mark labels and copy flags. */
3823 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3828 CROSSING_JUMP_P (insn
) = CROSSING_JUMP_P (trial
);
3829 mark_jump_label (PATTERN (insn
), insn
, 0);
3831 if (probability
.initialized_p ()
3832 && any_condjump_p (insn
)
3833 && !find_reg_note (insn
, REG_BR_PROB
, 0))
3835 /* We can preserve the REG_BR_PROB notes only if exactly
3836 one jump is created, otherwise the machine description
3837 is responsible for this step using
3838 split_branch_probability variable. */
3839 gcc_assert (njumps
== 1);
3840 add_reg_br_prob_note (insn
, probability
);
3845 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3846 in SEQ and copy any additional information across. */
3849 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3855 gcc_assert (call_insn
== NULL_RTX
);
3858 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3859 target may have explicitly specified. */
3860 p
= &CALL_INSN_FUNCTION_USAGE (insn
);
3863 *p
= CALL_INSN_FUNCTION_USAGE (trial
);
3865 /* If the old call was a sibling call, the new one must
3867 SIBLING_CALL_P (insn
) = SIBLING_CALL_P (trial
);
3869 /* If the new call is the last instruction in the sequence,
3870 it will effectively replace the old call in-situ. Otherwise
3871 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3872 so that it comes immediately after the new call. */
3873 if (NEXT_INSN (insn
))
3874 for (next
= NEXT_INSN (trial
);
3875 next
&& NOTE_P (next
);
3876 next
= NEXT_INSN (next
))
3877 if (NOTE_KIND (next
) == NOTE_INSN_CALL_ARG_LOCATION
)
3880 add_insn_after (next
, insn
, NULL
);
3886 /* Copy notes, particularly those related to the CFG. */
3887 for (note
= REG_NOTES (trial
); note
; note
= XEXP (note
, 1))
3889 switch (REG_NOTE_KIND (note
))
3892 copy_reg_eh_region_note_backward (note
, insn_last
, NULL
);
3898 case REG_CALL_NOCF_CHECK
:
3899 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3902 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3906 case REG_NON_LOCAL_GOTO
:
3907 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3910 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3918 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3920 rtx reg
= XEXP (note
, 0);
3921 if (!FIND_REG_INC_NOTE (insn
, reg
)
3922 && find_auto_inc (PATTERN (insn
), reg
))
3923 add_reg_note (insn
, REG_INC
, reg
);
3928 fixup_args_size_notes (NULL
, insn_last
, get_args_size (note
));
3932 gcc_assert (call_insn
!= NULL_RTX
);
3933 add_reg_note (call_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3941 /* If there are LABELS inside the split insns increment the
3942 usage count so we don't delete the label. */
3946 while (insn
!= NULL_RTX
)
3948 /* JUMP_P insns have already been "marked" above. */
3949 if (NONJUMP_INSN_P (insn
))
3950 mark_label_nuses (PATTERN (insn
));
3952 insn
= PREV_INSN (insn
);
3956 before
= PREV_INSN (trial
);
3957 after
= NEXT_INSN (trial
);
3959 tem
= emit_insn_after_setloc (seq
, trial
, INSN_LOCATION (trial
));
3961 delete_insn (trial
);
3963 /* Recursively call try_split for each new insn created; by the
3964 time control returns here that insn will be fully split, so
3965 set LAST and continue from the insn after the one returned.
3966 We can't use next_active_insn here since AFTER may be a note.
3967 Ignore deleted insns, which can be occur if not optimizing. */
3968 for (tem
= NEXT_INSN (before
); tem
!= after
; tem
= NEXT_INSN (tem
))
3969 if (! tem
->deleted () && INSN_P (tem
))
3970 tem
= try_split (PATTERN (tem
), tem
, 1);
3972 /* Return either the first or the last insn, depending on which was
3975 ? (after
? PREV_INSN (after
) : get_last_insn ())
3976 : NEXT_INSN (before
);
3979 /* Make and return an INSN rtx, initializing all its slots.
3980 Store PATTERN in the pattern slots. */
3983 make_insn_raw (rtx pattern
)
3987 insn
= as_a
<rtx_insn
*> (rtx_alloc (INSN
));
3989 INSN_UID (insn
) = cur_insn_uid
++;
3990 PATTERN (insn
) = pattern
;
3991 INSN_CODE (insn
) = -1;
3992 REG_NOTES (insn
) = NULL
;
3993 INSN_LOCATION (insn
) = curr_insn_location ();
3994 BLOCK_FOR_INSN (insn
) = NULL
;
3996 #ifdef ENABLE_RTL_CHECKING
3999 && (returnjump_p (insn
)
4000 || (GET_CODE (insn
) == SET
4001 && SET_DEST (insn
) == pc_rtx
)))
4003 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4011 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4014 make_debug_insn_raw (rtx pattern
)
4016 rtx_debug_insn
*insn
;
4018 insn
= as_a
<rtx_debug_insn
*> (rtx_alloc (DEBUG_INSN
));
4019 INSN_UID (insn
) = cur_debug_insn_uid
++;
4020 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
4021 INSN_UID (insn
) = cur_insn_uid
++;
4023 PATTERN (insn
) = pattern
;
4024 INSN_CODE (insn
) = -1;
4025 REG_NOTES (insn
) = NULL
;
4026 INSN_LOCATION (insn
) = curr_insn_location ();
4027 BLOCK_FOR_INSN (insn
) = NULL
;
4032 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4035 make_jump_insn_raw (rtx pattern
)
4037 rtx_jump_insn
*insn
;
4039 insn
= as_a
<rtx_jump_insn
*> (rtx_alloc (JUMP_INSN
));
4040 INSN_UID (insn
) = cur_insn_uid
++;
4042 PATTERN (insn
) = pattern
;
4043 INSN_CODE (insn
) = -1;
4044 REG_NOTES (insn
) = NULL
;
4045 JUMP_LABEL (insn
) = NULL
;
4046 INSN_LOCATION (insn
) = curr_insn_location ();
4047 BLOCK_FOR_INSN (insn
) = NULL
;
4052 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4055 make_call_insn_raw (rtx pattern
)
4057 rtx_call_insn
*insn
;
4059 insn
= as_a
<rtx_call_insn
*> (rtx_alloc (CALL_INSN
));
4060 INSN_UID (insn
) = cur_insn_uid
++;
4062 PATTERN (insn
) = pattern
;
4063 INSN_CODE (insn
) = -1;
4064 REG_NOTES (insn
) = NULL
;
4065 CALL_INSN_FUNCTION_USAGE (insn
) = NULL
;
4066 INSN_LOCATION (insn
) = curr_insn_location ();
4067 BLOCK_FOR_INSN (insn
) = NULL
;
4072 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
4075 make_note_raw (enum insn_note subtype
)
4077 /* Some notes are never created this way at all. These notes are
4078 only created by patching out insns. */
4079 gcc_assert (subtype
!= NOTE_INSN_DELETED_LABEL
4080 && subtype
!= NOTE_INSN_DELETED_DEBUG_LABEL
);
4082 rtx_note
*note
= as_a
<rtx_note
*> (rtx_alloc (NOTE
));
4083 INSN_UID (note
) = cur_insn_uid
++;
4084 NOTE_KIND (note
) = subtype
;
4085 BLOCK_FOR_INSN (note
) = NULL
;
4086 memset (&NOTE_DATA (note
), 0, sizeof (NOTE_DATA (note
)));
4090 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
4091 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4092 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4095 link_insn_into_chain (rtx_insn
*insn
, rtx_insn
*prev
, rtx_insn
*next
)
4097 SET_PREV_INSN (insn
) = prev
;
4098 SET_NEXT_INSN (insn
) = next
;
4101 SET_NEXT_INSN (prev
) = insn
;
4102 if (NONJUMP_INSN_P (prev
) && GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4104 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (prev
));
4105 SET_NEXT_INSN (sequence
->insn (sequence
->len () - 1)) = insn
;
4110 SET_PREV_INSN (next
) = insn
;
4111 if (NONJUMP_INSN_P (next
) && GET_CODE (PATTERN (next
)) == SEQUENCE
)
4113 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (next
));
4114 SET_PREV_INSN (sequence
->insn (0)) = insn
;
4118 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4120 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (insn
));
4121 SET_PREV_INSN (sequence
->insn (0)) = prev
;
4122 SET_NEXT_INSN (sequence
->insn (sequence
->len () - 1)) = next
;
4126 /* Add INSN to the end of the doubly-linked list.
4127 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4130 add_insn (rtx_insn
*insn
)
4132 rtx_insn
*prev
= get_last_insn ();
4133 link_insn_into_chain (insn
, prev
, NULL
);
4134 if (get_insns () == NULL
)
4135 set_first_insn (insn
);
4136 set_last_insn (insn
);
4139 /* Add INSN into the doubly-linked list after insn AFTER. */
4142 add_insn_after_nobb (rtx_insn
*insn
, rtx_insn
*after
)
4144 rtx_insn
*next
= NEXT_INSN (after
);
4146 gcc_assert (!optimize
|| !after
->deleted ());
4148 link_insn_into_chain (insn
, after
, next
);
4152 struct sequence_stack
*seq
;
4154 for (seq
= get_current_sequence (); seq
; seq
= seq
->next
)
4155 if (after
== seq
->last
)
4163 /* Add INSN into the doubly-linked list before insn BEFORE. */
4166 add_insn_before_nobb (rtx_insn
*insn
, rtx_insn
*before
)
4168 rtx_insn
*prev
= PREV_INSN (before
);
4170 gcc_assert (!optimize
|| !before
->deleted ());
4172 link_insn_into_chain (insn
, prev
, before
);
4176 struct sequence_stack
*seq
;
4178 for (seq
= get_current_sequence (); seq
; seq
= seq
->next
)
4179 if (before
== seq
->first
)
4189 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4190 If BB is NULL, an attempt is made to infer the bb from before.
4192 This and the next function should be the only functions called
4193 to insert an insn once delay slots have been filled since only
4194 they know how to update a SEQUENCE. */
4197 add_insn_after (rtx uncast_insn
, rtx uncast_after
, basic_block bb
)
4199 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4200 rtx_insn
*after
= as_a
<rtx_insn
*> (uncast_after
);
4201 add_insn_after_nobb (insn
, after
);
4202 if (!BARRIER_P (after
)
4203 && !BARRIER_P (insn
)
4204 && (bb
= BLOCK_FOR_INSN (after
)))
4206 set_block_for_insn (insn
, bb
);
4208 df_insn_rescan (insn
);
4209 /* Should not happen as first in the BB is always
4210 either NOTE or LABEL. */
4211 if (BB_END (bb
) == after
4212 /* Avoid clobbering of structure when creating new BB. */
4213 && !BARRIER_P (insn
)
4214 && !NOTE_INSN_BASIC_BLOCK_P (insn
))
4219 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4220 If BB is NULL, an attempt is made to infer the bb from before.
4222 This and the previous function should be the only functions called
4223 to insert an insn once delay slots have been filled since only
4224 they know how to update a SEQUENCE. */
4227 add_insn_before (rtx uncast_insn
, rtx uncast_before
, basic_block bb
)
4229 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4230 rtx_insn
*before
= as_a
<rtx_insn
*> (uncast_before
);
4231 add_insn_before_nobb (insn
, before
);
4234 && !BARRIER_P (before
)
4235 && !BARRIER_P (insn
))
4236 bb
= BLOCK_FOR_INSN (before
);
4240 set_block_for_insn (insn
, bb
);
4242 df_insn_rescan (insn
);
4243 /* Should not happen as first in the BB is always either NOTE or
4245 gcc_assert (BB_HEAD (bb
) != insn
4246 /* Avoid clobbering of structure when creating new BB. */
4248 || NOTE_INSN_BASIC_BLOCK_P (insn
));
4252 /* Replace insn with an deleted instruction note. */
4255 set_insn_deleted (rtx insn
)
4258 df_insn_delete (as_a
<rtx_insn
*> (insn
));
4259 PUT_CODE (insn
, NOTE
);
4260 NOTE_KIND (insn
) = NOTE_INSN_DELETED
;
4264 /* Unlink INSN from the insn chain.
4266 This function knows how to handle sequences.
4268 This function does not invalidate data flow information associated with
4269 INSN (i.e. does not call df_insn_delete). That makes this function
4270 usable for only disconnecting an insn from the chain, and re-emit it
4273 To later insert INSN elsewhere in the insn chain via add_insn and
4274 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4275 the caller. Nullifying them here breaks many insn chain walks.
4277 To really delete an insn and related DF information, use delete_insn. */
4280 remove_insn (rtx uncast_insn
)
4282 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4283 rtx_insn
*next
= NEXT_INSN (insn
);
4284 rtx_insn
*prev
= PREV_INSN (insn
);
4289 SET_NEXT_INSN (prev
) = next
;
4290 if (NONJUMP_INSN_P (prev
) && GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4292 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (prev
));
4293 SET_NEXT_INSN (sequence
->insn (sequence
->len () - 1)) = next
;
4298 struct sequence_stack
*seq
;
4300 for (seq
= get_current_sequence (); seq
; seq
= seq
->next
)
4301 if (insn
== seq
->first
)
4312 SET_PREV_INSN (next
) = prev
;
4313 if (NONJUMP_INSN_P (next
) && GET_CODE (PATTERN (next
)) == SEQUENCE
)
4315 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (next
));
4316 SET_PREV_INSN (sequence
->insn (0)) = prev
;
4321 struct sequence_stack
*seq
;
4323 for (seq
= get_current_sequence (); seq
; seq
= seq
->next
)
4324 if (insn
== seq
->last
)
4333 /* Fix up basic block boundaries, if necessary. */
4334 if (!BARRIER_P (insn
)
4335 && (bb
= BLOCK_FOR_INSN (insn
)))
4337 if (BB_HEAD (bb
) == insn
)
4339 /* Never ever delete the basic block note without deleting whole
4341 gcc_assert (!NOTE_P (insn
));
4342 BB_HEAD (bb
) = next
;
4344 if (BB_END (bb
) == insn
)
4349 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4352 add_function_usage_to (rtx call_insn
, rtx call_fusage
)
4354 gcc_assert (call_insn
&& CALL_P (call_insn
));
4356 /* Put the register usage information on the CALL. If there is already
4357 some usage information, put ours at the end. */
4358 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
4362 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
4363 link
= XEXP (link
, 1))
4366 XEXP (link
, 1) = call_fusage
;
4369 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
4372 /* Delete all insns made since FROM.
4373 FROM becomes the new last instruction. */
4376 delete_insns_since (rtx_insn
*from
)
4381 SET_NEXT_INSN (from
) = 0;
4382 set_last_insn (from
);
4385 /* This function is deprecated, please use sequences instead.
4387 Move a consecutive bunch of insns to a different place in the chain.
4388 The insns to be moved are those between FROM and TO.
4389 They are moved to a new position after the insn AFTER.
4390 AFTER must not be FROM or TO or any insn in between.
4392 This function does not know about SEQUENCEs and hence should not be
4393 called after delay-slot filling has been done. */
4396 reorder_insns_nobb (rtx_insn
*from
, rtx_insn
*to
, rtx_insn
*after
)
4400 for (rtx_insn
*x
= from
; x
!= to
; x
= NEXT_INSN (x
))
4401 gcc_assert (after
!= x
);
4402 gcc_assert (after
!= to
);
4405 /* Splice this bunch out of where it is now. */
4406 if (PREV_INSN (from
))
4407 SET_NEXT_INSN (PREV_INSN (from
)) = NEXT_INSN (to
);
4409 SET_PREV_INSN (NEXT_INSN (to
)) = PREV_INSN (from
);
4410 if (get_last_insn () == to
)
4411 set_last_insn (PREV_INSN (from
));
4412 if (get_insns () == from
)
4413 set_first_insn (NEXT_INSN (to
));
4415 /* Make the new neighbors point to it and it to them. */
4416 if (NEXT_INSN (after
))
4417 SET_PREV_INSN (NEXT_INSN (after
)) = to
;
4419 SET_NEXT_INSN (to
) = NEXT_INSN (after
);
4420 SET_PREV_INSN (from
) = after
;
4421 SET_NEXT_INSN (after
) = from
;
4422 if (after
== get_last_insn ())
4426 /* Same as function above, but take care to update BB boundaries. */
4428 reorder_insns (rtx_insn
*from
, rtx_insn
*to
, rtx_insn
*after
)
4430 rtx_insn
*prev
= PREV_INSN (from
);
4431 basic_block bb
, bb2
;
4433 reorder_insns_nobb (from
, to
, after
);
4435 if (!BARRIER_P (after
)
4436 && (bb
= BLOCK_FOR_INSN (after
)))
4439 df_set_bb_dirty (bb
);
4441 if (!BARRIER_P (from
)
4442 && (bb2
= BLOCK_FOR_INSN (from
)))
4444 if (BB_END (bb2
) == to
)
4445 BB_END (bb2
) = prev
;
4446 df_set_bb_dirty (bb2
);
4449 if (BB_END (bb
) == after
)
4452 for (x
= from
; x
!= NEXT_INSN (to
); x
= NEXT_INSN (x
))
4454 df_insn_change_bb (x
, bb
);
4459 /* Emit insn(s) of given code and pattern
4460 at a specified place within the doubly-linked list.
4462 All of the emit_foo global entry points accept an object
4463 X which is either an insn list or a PATTERN of a single
4466 There are thus a few canonical ways to generate code and
4467 emit it at a specific place in the instruction stream. For
4468 example, consider the instruction named SPOT and the fact that
4469 we would like to emit some instructions before SPOT. We might
4473 ... emit the new instructions ...
4474 insns_head = get_insns ();
4477 emit_insn_before (insns_head, SPOT);
4479 It used to be common to generate SEQUENCE rtl instead, but that
4480 is a relic of the past which no longer occurs. The reason is that
4481 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4482 generated would almost certainly die right after it was created. */
4485 emit_pattern_before_noloc (rtx x
, rtx before
, rtx last
, basic_block bb
,
4486 rtx_insn
*(*make_raw
) (rtx
))
4490 gcc_assert (before
);
4493 return safe_as_a
<rtx_insn
*> (last
);
4495 switch (GET_CODE (x
))
4504 insn
= as_a
<rtx_insn
*> (x
);
4507 rtx_insn
*next
= NEXT_INSN (insn
);
4508 add_insn_before (insn
, before
, bb
);
4514 #ifdef ENABLE_RTL_CHECKING
4521 last
= (*make_raw
) (x
);
4522 add_insn_before (last
, before
, bb
);
4526 return safe_as_a
<rtx_insn
*> (last
);
4529 /* Make X be output before the instruction BEFORE. */
4532 emit_insn_before_noloc (rtx x
, rtx_insn
*before
, basic_block bb
)
4534 return emit_pattern_before_noloc (x
, before
, before
, bb
, make_insn_raw
);
4537 /* Make an instruction with body X and code JUMP_INSN
4538 and output it before the instruction BEFORE. */
4541 emit_jump_insn_before_noloc (rtx x
, rtx_insn
*before
)
4543 return as_a
<rtx_jump_insn
*> (
4544 emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4545 make_jump_insn_raw
));
4548 /* Make an instruction with body X and code CALL_INSN
4549 and output it before the instruction BEFORE. */
4552 emit_call_insn_before_noloc (rtx x
, rtx_insn
*before
)
4554 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4555 make_call_insn_raw
);
4558 /* Make an instruction with body X and code DEBUG_INSN
4559 and output it before the instruction BEFORE. */
4562 emit_debug_insn_before_noloc (rtx x
, rtx before
)
4564 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4565 make_debug_insn_raw
);
4568 /* Make an insn of code BARRIER
4569 and output it before the insn BEFORE. */
4572 emit_barrier_before (rtx before
)
4574 rtx_barrier
*insn
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
4576 INSN_UID (insn
) = cur_insn_uid
++;
4578 add_insn_before (insn
, before
, NULL
);
4582 /* Emit the label LABEL before the insn BEFORE. */
4585 emit_label_before (rtx label
, rtx_insn
*before
)
4587 gcc_checking_assert (INSN_UID (label
) == 0);
4588 INSN_UID (label
) = cur_insn_uid
++;
4589 add_insn_before (label
, before
, NULL
);
4590 return as_a
<rtx_code_label
*> (label
);
4593 /* Helper for emit_insn_after, handles lists of instructions
4597 emit_insn_after_1 (rtx_insn
*first
, rtx uncast_after
, basic_block bb
)
4599 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4601 rtx_insn
*after_after
;
4602 if (!bb
&& !BARRIER_P (after
))
4603 bb
= BLOCK_FOR_INSN (after
);
4607 df_set_bb_dirty (bb
);
4608 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4609 if (!BARRIER_P (last
))
4611 set_block_for_insn (last
, bb
);
4612 df_insn_rescan (last
);
4614 if (!BARRIER_P (last
))
4616 set_block_for_insn (last
, bb
);
4617 df_insn_rescan (last
);
4619 if (BB_END (bb
) == after
)
4623 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4626 after_after
= NEXT_INSN (after
);
4628 SET_NEXT_INSN (after
) = first
;
4629 SET_PREV_INSN (first
) = after
;
4630 SET_NEXT_INSN (last
) = after_after
;
4632 SET_PREV_INSN (after_after
) = last
;
4634 if (after
== get_last_insn ())
4635 set_last_insn (last
);
4641 emit_pattern_after_noloc (rtx x
, rtx uncast_after
, basic_block bb
,
4642 rtx_insn
*(*make_raw
)(rtx
))
4644 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4645 rtx_insn
*last
= after
;
4652 switch (GET_CODE (x
))
4661 last
= emit_insn_after_1 (as_a
<rtx_insn
*> (x
), after
, bb
);
4664 #ifdef ENABLE_RTL_CHECKING
4671 last
= (*make_raw
) (x
);
4672 add_insn_after (last
, after
, bb
);
4679 /* Make X be output after the insn AFTER and set the BB of insn. If
4680 BB is NULL, an attempt is made to infer the BB from AFTER. */
4683 emit_insn_after_noloc (rtx x
, rtx after
, basic_block bb
)
4685 return emit_pattern_after_noloc (x
, after
, bb
, make_insn_raw
);
4689 /* Make an insn of code JUMP_INSN with body X
4690 and output it after the insn AFTER. */
4693 emit_jump_insn_after_noloc (rtx x
, rtx after
)
4695 return as_a
<rtx_jump_insn
*> (
4696 emit_pattern_after_noloc (x
, after
, NULL
, make_jump_insn_raw
));
4699 /* Make an instruction with body X and code CALL_INSN
4700 and output it after the instruction AFTER. */
4703 emit_call_insn_after_noloc (rtx x
, rtx after
)
4705 return emit_pattern_after_noloc (x
, after
, NULL
, make_call_insn_raw
);
4708 /* Make an instruction with body X and code CALL_INSN
4709 and output it after the instruction AFTER. */
4712 emit_debug_insn_after_noloc (rtx x
, rtx after
)
4714 return emit_pattern_after_noloc (x
, after
, NULL
, make_debug_insn_raw
);
4717 /* Make an insn of code BARRIER
4718 and output it after the insn AFTER. */
4721 emit_barrier_after (rtx after
)
4723 rtx_barrier
*insn
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
4725 INSN_UID (insn
) = cur_insn_uid
++;
4727 add_insn_after (insn
, after
, NULL
);
4731 /* Emit the label LABEL after the insn AFTER. */
4734 emit_label_after (rtx label
, rtx_insn
*after
)
4736 gcc_checking_assert (INSN_UID (label
) == 0);
4737 INSN_UID (label
) = cur_insn_uid
++;
4738 add_insn_after (label
, after
, NULL
);
4739 return as_a
<rtx_insn
*> (label
);
4742 /* Notes require a bit of special handling: Some notes need to have their
4743 BLOCK_FOR_INSN set, others should never have it set, and some should
4744 have it set or clear depending on the context. */
4746 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4747 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4748 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4751 note_outside_basic_block_p (enum insn_note subtype
, bool on_bb_boundary_p
)
4755 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4756 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4759 /* Notes for var tracking and EH region markers can appear between or
4760 inside basic blocks. If the caller is emitting on the basic block
4761 boundary, do not set BLOCK_FOR_INSN on the new note. */
4762 case NOTE_INSN_VAR_LOCATION
:
4763 case NOTE_INSN_CALL_ARG_LOCATION
:
4764 case NOTE_INSN_EH_REGION_BEG
:
4765 case NOTE_INSN_EH_REGION_END
:
4766 return on_bb_boundary_p
;
4768 /* Otherwise, BLOCK_FOR_INSN must be set. */
4774 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4777 emit_note_after (enum insn_note subtype
, rtx_insn
*after
)
4779 rtx_note
*note
= make_note_raw (subtype
);
4780 basic_block bb
= BARRIER_P (after
) ? NULL
: BLOCK_FOR_INSN (after
);
4781 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_END (bb
) == after
);
4783 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4784 add_insn_after_nobb (note
, after
);
4786 add_insn_after (note
, after
, bb
);
4790 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4793 emit_note_before (enum insn_note subtype
, rtx_insn
*before
)
4795 rtx_note
*note
= make_note_raw (subtype
);
4796 basic_block bb
= BARRIER_P (before
) ? NULL
: BLOCK_FOR_INSN (before
);
4797 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_HEAD (bb
) == before
);
4799 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4800 add_insn_before_nobb (note
, before
);
4802 add_insn_before (note
, before
, bb
);
4806 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4807 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4810 emit_pattern_after_setloc (rtx pattern
, rtx uncast_after
, int loc
,
4811 rtx_insn
*(*make_raw
) (rtx
))
4813 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4814 rtx_insn
*last
= emit_pattern_after_noloc (pattern
, after
, NULL
, make_raw
);
4816 if (pattern
== NULL_RTX
|| !loc
)
4819 after
= NEXT_INSN (after
);
4822 if (active_insn_p (after
)
4823 && !JUMP_TABLE_DATA_P (after
) /* FIXME */
4824 && !INSN_LOCATION (after
))
4825 INSN_LOCATION (after
) = loc
;
4828 after
= NEXT_INSN (after
);
4833 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4834 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4838 emit_pattern_after (rtx pattern
, rtx uncast_after
, bool skip_debug_insns
,
4839 rtx_insn
*(*make_raw
) (rtx
))
4841 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4842 rtx_insn
*prev
= after
;
4844 if (skip_debug_insns
)
4845 while (DEBUG_INSN_P (prev
))
4846 prev
= PREV_INSN (prev
);
4849 return emit_pattern_after_setloc (pattern
, after
, INSN_LOCATION (prev
),
4852 return emit_pattern_after_noloc (pattern
, after
, NULL
, make_raw
);
4855 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4857 emit_insn_after_setloc (rtx pattern
, rtx after
, int loc
)
4859 return emit_pattern_after_setloc (pattern
, after
, loc
, make_insn_raw
);
4862 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4864 emit_insn_after (rtx pattern
, rtx after
)
4866 return emit_pattern_after (pattern
, after
, true, make_insn_raw
);
4869 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4871 emit_jump_insn_after_setloc (rtx pattern
, rtx after
, int loc
)
4873 return as_a
<rtx_jump_insn
*> (
4874 emit_pattern_after_setloc (pattern
, after
, loc
, make_jump_insn_raw
));
4877 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4879 emit_jump_insn_after (rtx pattern
, rtx after
)
4881 return as_a
<rtx_jump_insn
*> (
4882 emit_pattern_after (pattern
, after
, true, make_jump_insn_raw
));
4885 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4887 emit_call_insn_after_setloc (rtx pattern
, rtx after
, int loc
)
4889 return emit_pattern_after_setloc (pattern
, after
, loc
, make_call_insn_raw
);
4892 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4894 emit_call_insn_after (rtx pattern
, rtx after
)
4896 return emit_pattern_after (pattern
, after
, true, make_call_insn_raw
);
4899 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4901 emit_debug_insn_after_setloc (rtx pattern
, rtx after
, int loc
)
4903 return emit_pattern_after_setloc (pattern
, after
, loc
, make_debug_insn_raw
);
4906 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4908 emit_debug_insn_after (rtx pattern
, rtx after
)
4910 return emit_pattern_after (pattern
, after
, false, make_debug_insn_raw
);
4913 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4914 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4915 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4919 emit_pattern_before_setloc (rtx pattern
, rtx uncast_before
, int loc
, bool insnp
,
4920 rtx_insn
*(*make_raw
) (rtx
))
4922 rtx_insn
*before
= as_a
<rtx_insn
*> (uncast_before
);
4923 rtx_insn
*first
= PREV_INSN (before
);
4924 rtx_insn
*last
= emit_pattern_before_noloc (pattern
, before
,
4925 insnp
? before
: NULL_RTX
,
4928 if (pattern
== NULL_RTX
|| !loc
)
4932 first
= get_insns ();
4934 first
= NEXT_INSN (first
);
4937 if (active_insn_p (first
)
4938 && !JUMP_TABLE_DATA_P (first
) /* FIXME */
4939 && !INSN_LOCATION (first
))
4940 INSN_LOCATION (first
) = loc
;
4943 first
= NEXT_INSN (first
);
4948 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4949 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4950 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4951 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4954 emit_pattern_before (rtx pattern
, rtx uncast_before
, bool skip_debug_insns
,
4955 bool insnp
, rtx_insn
*(*make_raw
) (rtx
))
4957 rtx_insn
*before
= safe_as_a
<rtx_insn
*> (uncast_before
);
4958 rtx_insn
*next
= before
;
4960 if (skip_debug_insns
)
4961 while (DEBUG_INSN_P (next
))
4962 next
= PREV_INSN (next
);
4965 return emit_pattern_before_setloc (pattern
, before
, INSN_LOCATION (next
),
4968 return emit_pattern_before_noloc (pattern
, before
,
4969 insnp
? before
: NULL_RTX
,
4973 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4975 emit_insn_before_setloc (rtx pattern
, rtx_insn
*before
, int loc
)
4977 return emit_pattern_before_setloc (pattern
, before
, loc
, true,
4981 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4983 emit_insn_before (rtx pattern
, rtx before
)
4985 return emit_pattern_before (pattern
, before
, true, true, make_insn_raw
);
4988 /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4990 emit_jump_insn_before_setloc (rtx pattern
, rtx_insn
*before
, int loc
)
4992 return as_a
<rtx_jump_insn
*> (
4993 emit_pattern_before_setloc (pattern
, before
, loc
, false,
4994 make_jump_insn_raw
));
4997 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4999 emit_jump_insn_before (rtx pattern
, rtx before
)
5001 return as_a
<rtx_jump_insn
*> (
5002 emit_pattern_before (pattern
, before
, true, false,
5003 make_jump_insn_raw
));
5006 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5008 emit_call_insn_before_setloc (rtx pattern
, rtx_insn
*before
, int loc
)
5010 return emit_pattern_before_setloc (pattern
, before
, loc
, false,
5011 make_call_insn_raw
);
5014 /* Like emit_call_insn_before_noloc,
5015 but set insn_location according to BEFORE. */
5017 emit_call_insn_before (rtx pattern
, rtx_insn
*before
)
5019 return emit_pattern_before (pattern
, before
, true, false,
5020 make_call_insn_raw
);
5023 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5025 emit_debug_insn_before_setloc (rtx pattern
, rtx before
, int loc
)
5027 return emit_pattern_before_setloc (pattern
, before
, loc
, false,
5028 make_debug_insn_raw
);
5031 /* Like emit_debug_insn_before_noloc,
5032 but set insn_location according to BEFORE. */
5034 emit_debug_insn_before (rtx pattern
, rtx_insn
*before
)
5036 return emit_pattern_before (pattern
, before
, false, false,
5037 make_debug_insn_raw
);
5040 /* Take X and emit it at the end of the doubly-linked
5043 Returns the last insn emitted. */
5048 rtx_insn
*last
= get_last_insn ();
5054 switch (GET_CODE (x
))
5063 insn
= as_a
<rtx_insn
*> (x
);
5066 rtx_insn
*next
= NEXT_INSN (insn
);
5073 #ifdef ENABLE_RTL_CHECKING
5074 case JUMP_TABLE_DATA
:
5081 last
= make_insn_raw (x
);
5089 /* Make an insn of code DEBUG_INSN with pattern X
5090 and add it to the end of the doubly-linked list. */
5093 emit_debug_insn (rtx x
)
5095 rtx_insn
*last
= get_last_insn ();
5101 switch (GET_CODE (x
))
5110 insn
= as_a
<rtx_insn
*> (x
);
5113 rtx_insn
*next
= NEXT_INSN (insn
);
5120 #ifdef ENABLE_RTL_CHECKING
5121 case JUMP_TABLE_DATA
:
5128 last
= make_debug_insn_raw (x
);
5136 /* Make an insn of code JUMP_INSN with pattern X
5137 and add it to the end of the doubly-linked list. */
5140 emit_jump_insn (rtx x
)
5142 rtx_insn
*last
= NULL
;
5145 switch (GET_CODE (x
))
5154 insn
= as_a
<rtx_insn
*> (x
);
5157 rtx_insn
*next
= NEXT_INSN (insn
);
5164 #ifdef ENABLE_RTL_CHECKING
5165 case JUMP_TABLE_DATA
:
5172 last
= make_jump_insn_raw (x
);
5180 /* Make an insn of code CALL_INSN with pattern X
5181 and add it to the end of the doubly-linked list. */
5184 emit_call_insn (rtx x
)
5188 switch (GET_CODE (x
))
5197 insn
= emit_insn (x
);
5200 #ifdef ENABLE_RTL_CHECKING
5202 case JUMP_TABLE_DATA
:
5208 insn
= make_call_insn_raw (x
);
5216 /* Add the label LABEL to the end of the doubly-linked list. */
5219 emit_label (rtx uncast_label
)
5221 rtx_code_label
*label
= as_a
<rtx_code_label
*> (uncast_label
);
5223 gcc_checking_assert (INSN_UID (label
) == 0);
5224 INSN_UID (label
) = cur_insn_uid
++;
5229 /* Make an insn of code JUMP_TABLE_DATA
5230 and add it to the end of the doubly-linked list. */
5232 rtx_jump_table_data
*
5233 emit_jump_table_data (rtx table
)
5235 rtx_jump_table_data
*jump_table_data
=
5236 as_a
<rtx_jump_table_data
*> (rtx_alloc (JUMP_TABLE_DATA
));
5237 INSN_UID (jump_table_data
) = cur_insn_uid
++;
5238 PATTERN (jump_table_data
) = table
;
5239 BLOCK_FOR_INSN (jump_table_data
) = NULL
;
5240 add_insn (jump_table_data
);
5241 return jump_table_data
;
5244 /* Make an insn of code BARRIER
5245 and add it to the end of the doubly-linked list. */
5250 rtx_barrier
*barrier
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
5251 INSN_UID (barrier
) = cur_insn_uid
++;
5256 /* Emit a copy of note ORIG. */
5259 emit_note_copy (rtx_note
*orig
)
5261 enum insn_note kind
= (enum insn_note
) NOTE_KIND (orig
);
5262 rtx_note
*note
= make_note_raw (kind
);
5263 NOTE_DATA (note
) = NOTE_DATA (orig
);
5268 /* Make an insn of code NOTE or type NOTE_NO
5269 and add it to the end of the doubly-linked list. */
5272 emit_note (enum insn_note kind
)
5274 rtx_note
*note
= make_note_raw (kind
);
5279 /* Emit a clobber of lvalue X. */
5282 emit_clobber (rtx x
)
5284 /* CONCATs should not appear in the insn stream. */
5285 if (GET_CODE (x
) == CONCAT
)
5287 emit_clobber (XEXP (x
, 0));
5288 return emit_clobber (XEXP (x
, 1));
5290 return emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
5293 /* Return a sequence of insns to clobber lvalue X. */
5307 /* Emit a use of rvalue X. */
5312 /* CONCATs should not appear in the insn stream. */
5313 if (GET_CODE (x
) == CONCAT
)
5315 emit_use (XEXP (x
, 0));
5316 return emit_use (XEXP (x
, 1));
5318 return emit_insn (gen_rtx_USE (VOIDmode
, x
));
5321 /* Return a sequence of insns to use rvalue X. */
5335 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5336 Return the set in INSN that such notes describe, or NULL if the notes
5337 have no meaning for INSN. */
5340 set_for_reg_notes (rtx insn
)
5347 pat
= PATTERN (insn
);
5348 if (GET_CODE (pat
) == PARALLEL
)
5350 /* We do not use single_set because that ignores SETs of unused
5351 registers. REG_EQUAL and REG_EQUIV notes really do require the
5352 PARALLEL to have a single SET. */
5353 if (multiple_sets (insn
))
5355 pat
= XVECEXP (pat
, 0, 0);
5358 if (GET_CODE (pat
) != SET
)
5361 reg
= SET_DEST (pat
);
5363 /* Notes apply to the contents of a STRICT_LOW_PART. */
5364 if (GET_CODE (reg
) == STRICT_LOW_PART
5365 || GET_CODE (reg
) == ZERO_EXTRACT
)
5366 reg
= XEXP (reg
, 0);
5368 /* Check that we have a register. */
5369 if (!(REG_P (reg
) || GET_CODE (reg
) == SUBREG
))
5375 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5376 note of this type already exists, remove it first. */
5379 set_unique_reg_note (rtx insn
, enum reg_note kind
, rtx datum
)
5381 rtx note
= find_reg_note (insn
, kind
, NULL_RTX
);
5387 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5388 if (!set_for_reg_notes (insn
) && GET_CODE (PATTERN (insn
)) != USE
)
5391 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5392 It serves no useful purpose and breaks eliminate_regs. */
5393 if (GET_CODE (datum
) == ASM_OPERANDS
)
5396 /* Notes with side effects are dangerous. Even if the side-effect
5397 initially mirrors one in PATTERN (INSN), later optimizations
5398 might alter the way that the final register value is calculated
5399 and so move or alter the side-effect in some way. The note would
5400 then no longer be a valid substitution for SET_SRC. */
5401 if (side_effects_p (datum
))
5410 XEXP (note
, 0) = datum
;
5413 add_reg_note (insn
, kind
, datum
);
5414 note
= REG_NOTES (insn
);
5421 df_notes_rescan (as_a
<rtx_insn
*> (insn
));
5430 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5432 set_dst_reg_note (rtx insn
, enum reg_note kind
, rtx datum
, rtx dst
)
5434 rtx set
= set_for_reg_notes (insn
);
5436 if (set
&& SET_DEST (set
) == dst
)
5437 return set_unique_reg_note (insn
, kind
, datum
);
5441 /* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5442 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5445 If X is a label, it is simply added into the insn chain. */
5448 emit (rtx x
, bool allow_barrier_p
)
5450 enum rtx_code code
= classify_insn (x
);
5455 return emit_label (x
);
5457 return emit_insn (x
);
5460 rtx_insn
*insn
= emit_jump_insn (x
);
5462 && (any_uncondjump_p (insn
) || GET_CODE (x
) == RETURN
))
5463 return emit_barrier ();
5467 return emit_call_insn (x
);
5469 return emit_debug_insn (x
);
/* Space for free sequence stack entries.  Retired entries are chained
   here for reuse by start_sequence; GTY ((deletable)) lets the GC
   discard the cache wholesale.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5478 /* Begin emitting insns to a sequence. If this sequence will contain
5479 something that might cause the compiler to pop arguments to function
5480 calls (because those pops have previously been deferred; see
5481 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5482 before calling this function. That will ensure that the deferred
5483 pops are not accidentally emitted in the middle of this sequence. */
5486 start_sequence (void)
5488 struct sequence_stack
*tem
;
5490 if (free_sequence_stack
!= NULL
)
5492 tem
= free_sequence_stack
;
5493 free_sequence_stack
= tem
->next
;
5496 tem
= ggc_alloc
<sequence_stack
> ();
5498 tem
->next
= get_current_sequence ()->next
;
5499 tem
->first
= get_insns ();
5500 tem
->last
= get_last_insn ();
5501 get_current_sequence ()->next
= tem
;
5507 /* Set up the insn chain starting with FIRST as the current sequence,
5508 saving the previously current one. See the documentation for
5509 start_sequence for more information about how to use this function. */
5512 push_to_sequence (rtx_insn
*first
)
5518 for (last
= first
; last
&& NEXT_INSN (last
); last
= NEXT_INSN (last
))
5521 set_first_insn (first
);
5522 set_last_insn (last
);
5525 /* Like push_to_sequence, but take the last insn as an argument to avoid
5526 looping through the list. */
5529 push_to_sequence2 (rtx_insn
*first
, rtx_insn
*last
)
5533 set_first_insn (first
);
5534 set_last_insn (last
);
5537 /* Set up the outer-level insn chain
5538 as the current sequence, saving the previously current one. */
5541 push_topmost_sequence (void)
5543 struct sequence_stack
*top
;
5547 top
= get_topmost_sequence ();
5548 set_first_insn (top
->first
);
5549 set_last_insn (top
->last
);
5552 /* After emitting to the outer-level insn chain, update the outer-level
5553 insn chain, and restore the previous saved state. */
5556 pop_topmost_sequence (void)
5558 struct sequence_stack
*top
;
5560 top
= get_topmost_sequence ();
5561 top
->first
= get_insns ();
5562 top
->last
= get_last_insn ();
5567 /* After emitting to a sequence, restore previous saved state.
5569 To get the contents of the sequence just made, you must call
5570 `get_insns' *before* calling here.
5572 If the compiler might have deferred popping arguments while
5573 generating this sequence, and this sequence will not be immediately
5574 inserted into the instruction stream, use do_pending_stack_adjust
5575 before calling get_insns. That will ensure that the deferred
5576 pops are inserted into this sequence, and not into some random
5577 location in the instruction stream. See INHIBIT_DEFER_POP for more
5578 information about deferred popping of arguments. */
5583 struct sequence_stack
*tem
= get_current_sequence ()->next
;
5585 set_first_insn (tem
->first
);
5586 set_last_insn (tem
->last
);
5587 get_current_sequence ()->next
= tem
->next
;
5589 memset (tem
, 0, sizeof (*tem
));
5590 tem
->next
= free_sequence_stack
;
5591 free_sequence_stack
= tem
;
5594 /* Return 1 if currently emitting into a sequence. */
5597 in_sequence_p (void)
5599 return get_current_sequence ()->next
!= 0;
5602 /* Put the various virtual registers into REGNO_REG_RTX. */
5605 init_virtual_regs (void)
5607 regno_reg_rtx
[VIRTUAL_INCOMING_ARGS_REGNUM
] = virtual_incoming_args_rtx
;
5608 regno_reg_rtx
[VIRTUAL_STACK_VARS_REGNUM
] = virtual_stack_vars_rtx
;
5609 regno_reg_rtx
[VIRTUAL_STACK_DYNAMIC_REGNUM
] = virtual_stack_dynamic_rtx
;
5610 regno_reg_rtx
[VIRTUAL_OUTGOING_ARGS_REGNUM
] = virtual_outgoing_args_rtx
;
5611 regno_reg_rtx
[VIRTUAL_CFA_REGNUM
] = virtual_cfa_rtx
;
5612 regno_reg_rtx
[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM
]
5613 = virtual_preferred_stack_boundary_rtx
;
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
5636 /* Recursively create a new copy of an rtx for copy_insn.
5637 This function differs from copy_rtx in that it handles SCRATCHes and
5638 ASM_OPERANDs properly.
5639 Normally, this function is not used directly; use copy_insn as front end.
5640 However, you could first copy an insn pattern with copy_insn and then use
5641 this function afterwards to properly copy any REG_NOTEs containing
5645 copy_insn_1 (rtx orig
)
5650 const char *format_ptr
;
5655 code
= GET_CODE (orig
);
5670 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5671 clobbers or clobbers of hard registers that originated as pseudos.
5672 This is needed to allow safe register renaming. */
5673 if (REG_P (XEXP (orig
, 0))
5674 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig
, 0)))
5675 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig
, 0))))
5680 for (i
= 0; i
< copy_insn_n_scratches
; i
++)
5681 if (copy_insn_scratch_in
[i
] == orig
)
5682 return copy_insn_scratch_out
[i
];
5686 if (shared_const_p (orig
))
5690 /* A MEM with a constant address is not sharable. The problem is that
5691 the constant address may need to be reloaded. If the mem is shared,
5692 then reloading one copy of this mem will cause all copies to appear
5693 to have been reloaded. */
5699 /* Copy the various flags, fields, and other information. We assume
5700 that all fields need copying, and then clear the fields that should
5701 not be copied. That is the sensible default behavior, and forces
5702 us to explicitly document why we are *not* copying a flag. */
5703 copy
= shallow_copy_rtx (orig
);
5705 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5708 RTX_FLAG (copy
, jump
) = 0;
5709 RTX_FLAG (copy
, call
) = 0;
5710 RTX_FLAG (copy
, frame_related
) = 0;
5713 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
5715 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
5716 switch (*format_ptr
++)
5719 if (XEXP (orig
, i
) != NULL
)
5720 XEXP (copy
, i
) = copy_insn_1 (XEXP (orig
, i
));
5725 if (XVEC (orig
, i
) == orig_asm_constraints_vector
)
5726 XVEC (copy
, i
) = copy_asm_constraints_vector
;
5727 else if (XVEC (orig
, i
) == orig_asm_operands_vector
)
5728 XVEC (copy
, i
) = copy_asm_operands_vector
;
5729 else if (XVEC (orig
, i
) != NULL
)
5731 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
5732 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
5733 XVECEXP (copy
, i
, j
) = copy_insn_1 (XVECEXP (orig
, i
, j
));
5745 /* These are left unchanged. */
5752 if (code
== SCRATCH
)
5754 i
= copy_insn_n_scratches
++;
5755 gcc_assert (i
< MAX_RECOG_OPERANDS
);
5756 copy_insn_scratch_in
[i
] = orig
;
5757 copy_insn_scratch_out
[i
] = copy
;
5759 else if (code
== ASM_OPERANDS
)
5761 orig_asm_operands_vector
= ASM_OPERANDS_INPUT_VEC (orig
);
5762 copy_asm_operands_vector
= ASM_OPERANDS_INPUT_VEC (copy
);
5763 orig_asm_constraints_vector
= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig
);
5764 copy_asm_constraints_vector
= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy
);
5770 /* Create a new copy of an rtx.
5771 This function differs from copy_rtx in that it handles SCRATCHes and
5772 ASM_OPERANDs properly.
5773 INSN doesn't really have to be a full INSN; it could be just the
5776 copy_insn (rtx insn
)
5778 copy_insn_n_scratches
= 0;
5779 orig_asm_operands_vector
= 0;
5780 orig_asm_constraints_vector
= 0;
5781 copy_asm_operands_vector
= 0;
5782 copy_asm_constraints_vector
= 0;
5783 return copy_insn_1 (insn
);
5786 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5787 on that assumption that INSN itself remains in its original place. */
5790 copy_delay_slot_insn (rtx_insn
*insn
)
5792 /* Copy INSN with its rtx_code, all its notes, location etc. */
5793 insn
= as_a
<rtx_insn
*> (copy_rtx (insn
));
5794 INSN_UID (insn
) = cur_insn_uid
++;
5798 /* Initialize data structures and variables in this file
5799 before generating rtl for each function. */
5804 set_first_insn (NULL
);
5805 set_last_insn (NULL
);
5806 if (MIN_NONDEBUG_INSN_UID
)
5807 cur_insn_uid
= MIN_NONDEBUG_INSN_UID
;
5810 cur_debug_insn_uid
= 1;
5811 reg_rtx_no
= LAST_VIRTUAL_REGISTER
+ 1;
5812 first_label_num
= label_num
;
5813 get_current_sequence ()->next
= NULL
;
5815 /* Init the tables that describe all the pseudo regs. */
5817 crtl
->emit
.regno_pointer_align_length
= LAST_VIRTUAL_REGISTER
+ 101;
5819 crtl
->emit
.regno_pointer_align
5820 = XCNEWVEC (unsigned char, crtl
->emit
.regno_pointer_align_length
);
5823 = ggc_cleared_vec_alloc
<rtx
> (crtl
->emit
.regno_pointer_align_length
);
5825 /* Put copies of all the hard registers into regno_reg_rtx. */
5826 memcpy (regno_reg_rtx
,
5827 initial_regno_reg_rtx
,
5828 FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
5830 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5831 init_virtual_regs ();
5833 /* Indicate that the virtual registers and stack locations are
5835 REG_POINTER (stack_pointer_rtx
) = 1;
5836 REG_POINTER (frame_pointer_rtx
) = 1;
5837 REG_POINTER (hard_frame_pointer_rtx
) = 1;
5838 REG_POINTER (arg_pointer_rtx
) = 1;
5840 REG_POINTER (virtual_incoming_args_rtx
) = 1;
5841 REG_POINTER (virtual_stack_vars_rtx
) = 1;
5842 REG_POINTER (virtual_stack_dynamic_rtx
) = 1;
5843 REG_POINTER (virtual_outgoing_args_rtx
) = 1;
5844 REG_POINTER (virtual_cfa_rtx
) = 1;
5846 #ifdef STACK_BOUNDARY
5847 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM
) = STACK_BOUNDARY
;
5848 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
;
5849 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
;
5850 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM
) = STACK_BOUNDARY
;
5852 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM
) = STACK_BOUNDARY
;
5853 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM
) = STACK_BOUNDARY
;
5854 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM
) = STACK_BOUNDARY
;
5855 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM
) = STACK_BOUNDARY
;
5857 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM
) = BITS_PER_WORD
;
5860 #ifdef INIT_EXPANDERS
5865 /* Return the value of element I of CONST_VECTOR X as a wide_int. */
5868 const_vector_int_elt (const_rtx x
, unsigned int i
)
5870 /* First handle elements that are directly encoded. */
5871 machine_mode elt_mode
= GET_MODE_INNER (GET_MODE (x
));
5872 if (i
< (unsigned int) XVECLEN (x
, 0))
5873 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x
, i
), elt_mode
);
5875 /* Identify the pattern that contains element I and work out the index of
5876 the last encoded element for that pattern. */
5877 unsigned int encoded_nelts
= const_vector_encoded_nelts (x
);
5878 unsigned int npatterns
= CONST_VECTOR_NPATTERNS (x
);
5879 unsigned int count
= i
/ npatterns
;
5880 unsigned int pattern
= i
% npatterns
;
5881 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
5883 /* If there are no steps, the final encoded value is the right one. */
5884 if (!CONST_VECTOR_STEPPED_P (x
))
5885 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x
, final_i
), elt_mode
);
5887 /* Otherwise work out the value from the last two encoded elements. */
5888 rtx v1
= CONST_VECTOR_ENCODED_ELT (x
, final_i
- npatterns
);
5889 rtx v2
= CONST_VECTOR_ENCODED_ELT (x
, final_i
);
5890 wide_int diff
= wi::sub (rtx_mode_t (v2
, elt_mode
),
5891 rtx_mode_t (v1
, elt_mode
));
5892 return wi::add (rtx_mode_t (v2
, elt_mode
), (count
- 2) * diff
);
5895 /* Return the value of element I of CONST_VECTOR X. */
5898 const_vector_elt (const_rtx x
, unsigned int i
)
5900 /* First handle elements that are directly encoded. */
5901 if (i
< (unsigned int) XVECLEN (x
, 0))
5902 return CONST_VECTOR_ENCODED_ELT (x
, i
);
5904 /* If there are no steps, the final encoded value is the right one. */
5905 if (!CONST_VECTOR_STEPPED_P (x
))
5907 /* Identify the pattern that contains element I and work out the index of
5908 the last encoded element for that pattern. */
5909 unsigned int encoded_nelts
= const_vector_encoded_nelts (x
);
5910 unsigned int npatterns
= CONST_VECTOR_NPATTERNS (x
);
5911 unsigned int pattern
= i
% npatterns
;
5912 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
5913 return CONST_VECTOR_ENCODED_ELT (x
, final_i
);
5916 /* Otherwise work out the value from the last two encoded elements. */
5917 return immed_wide_int_const (const_vector_int_elt (x
, i
),
5918 GET_MODE_INNER (GET_MODE (x
)));
5921 /* Return true if X is a valid element for a CONST_VECTOR of the given
5925 valid_for_const_vector_p (machine_mode
, rtx x
)
5927 return (CONST_SCALAR_INT_P (x
)
5928 || CONST_DOUBLE_AS_FLOAT_P (x
)
5929 || CONST_FIXED_P (x
));
5932 /* Generate a vector constant of mode MODE in which every element has
5936 gen_const_vec_duplicate (machine_mode mode
, rtx elt
)
5938 rtx_vector_builder
builder (mode
, 1, 1);
5939 builder
.quick_push (elt
);
5940 return builder
.build ();
5943 /* Return a vector rtx of mode MODE in which every element has value X.
5944 The result will be a constant if X is constant. */
5947 gen_vec_duplicate (machine_mode mode
, rtx x
)
5949 if (valid_for_const_vector_p (mode
, x
))
5950 return gen_const_vec_duplicate (mode
, x
);
5951 return gen_rtx_VEC_DUPLICATE (mode
, x
);
5954 /* A subroutine of const_vec_series_p that handles the case in which:
5956 (GET_CODE (X) == CONST_VECTOR
5957 && CONST_VECTOR_NPATTERNS (X) == 1
5958 && !CONST_VECTOR_DUPLICATE_P (X))
5960 is known to hold. */
5963 const_vec_series_p_1 (const_rtx x
, rtx
*base_out
, rtx
*step_out
)
5965 /* Stepped sequences are only defined for integers, to avoid specifying
5966 rounding behavior. */
5967 if (GET_MODE_CLASS (GET_MODE (x
)) != MODE_VECTOR_INT
)
5970 /* A non-duplicated vector with two elements can always be seen as a
5971 series with a nonzero step. Longer vectors must have a stepped
5973 if (CONST_VECTOR_NUNITS (x
) != 2
5974 && !CONST_VECTOR_STEPPED_P (x
))
5977 /* Calculate the step between the first and second elements. */
5978 scalar_mode inner
= GET_MODE_INNER (GET_MODE (x
));
5979 rtx base
= CONST_VECTOR_ELT (x
, 0);
5980 rtx step
= simplify_binary_operation (MINUS
, inner
,
5981 CONST_VECTOR_ENCODED_ELT (x
, 1), base
);
5982 if (rtx_equal_p (step
, CONST0_RTX (inner
)))
5985 /* If we have a stepped encoding, check that the step between the
5986 second and third elements is the same as STEP. */
5987 if (CONST_VECTOR_STEPPED_P (x
))
5989 rtx diff
= simplify_binary_operation (MINUS
, inner
,
5990 CONST_VECTOR_ENCODED_ELT (x
, 2),
5991 CONST_VECTOR_ENCODED_ELT (x
, 1));
5992 if (!rtx_equal_p (step
, diff
))
6001 /* Generate a vector constant of mode MODE in which element I has
6002 the value BASE + I * STEP. */
6005 gen_const_vec_series (machine_mode mode
, rtx base
, rtx step
)
6007 gcc_assert (valid_for_const_vector_p (mode
, base
)
6008 && valid_for_const_vector_p (mode
, step
));
6010 rtx_vector_builder
builder (mode
, 1, 3);
6011 builder
.quick_push (base
);
6012 for (int i
= 1; i
< 3; ++i
)
6013 builder
.quick_push (simplify_gen_binary (PLUS
, GET_MODE_INNER (mode
),
6014 builder
[i
- 1], step
));
6015 return builder
.build ();
6018 /* Generate a vector of mode MODE in which element I has the value
6019 BASE + I * STEP. The result will be a constant if BASE and STEP
6020 are both constants. */
6023 gen_vec_series (machine_mode mode
, rtx base
, rtx step
)
6025 if (step
== const0_rtx
)
6026 return gen_vec_duplicate (mode
, base
);
6027 if (valid_for_const_vector_p (mode
, base
)
6028 && valid_for_const_vector_p (mode
, step
))
6029 return gen_const_vec_series (mode
, base
, step
);
6030 return gen_rtx_VEC_SERIES (mode
, base
, step
);
6033 /* Generate a new vector constant for mode MODE and constant value
6037 gen_const_vector (machine_mode mode
, int constant
)
6039 machine_mode inner
= GET_MODE_INNER (mode
);
6041 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner
));
6043 rtx el
= const_tiny_rtx
[constant
][(int) inner
];
6046 return gen_const_vec_duplicate (mode
, el
);
6049 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6050 all elements are zero, and the one vector when all elements are one. */
6052 gen_rtx_CONST_VECTOR (machine_mode mode
, rtvec v
)
6054 gcc_assert (GET_MODE_NUNITS (mode
) == GET_NUM_ELEM (v
));
6056 /* If the values are all the same, check to see if we can use one of the
6057 standard constant vectors. */
6058 if (rtvec_all_equal_p (v
))
6059 return gen_const_vec_duplicate (mode
, RTVEC_ELT (v
, 0));
6061 unsigned int nunits
= GET_NUM_ELEM (v
);
6062 rtx_vector_builder
builder (mode
, nunits
, 1);
6063 for (unsigned int i
= 0; i
< nunits
; ++i
)
6064 builder
.quick_push (RTVEC_ELT (v
, i
));
6065 return builder
.build (v
);
6068 /* Initialise global register information required by all functions. */
6071 init_emit_regs (void)
6077 /* Reset register attributes */
6078 reg_attrs_htab
->empty ();
6080 /* We need reg_raw_mode, so initialize the modes now. */
6081 init_reg_modes_target ();
6083 /* Assign register numbers to the globally defined register rtx. */
6084 stack_pointer_rtx
= gen_raw_REG (Pmode
, STACK_POINTER_REGNUM
);
6085 frame_pointer_rtx
= gen_raw_REG (Pmode
, FRAME_POINTER_REGNUM
);
6086 hard_frame_pointer_rtx
= gen_raw_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
);
6087 arg_pointer_rtx
= gen_raw_REG (Pmode
, ARG_POINTER_REGNUM
);
6088 virtual_incoming_args_rtx
=
6089 gen_raw_REG (Pmode
, VIRTUAL_INCOMING_ARGS_REGNUM
);
6090 virtual_stack_vars_rtx
=
6091 gen_raw_REG (Pmode
, VIRTUAL_STACK_VARS_REGNUM
);
6092 virtual_stack_dynamic_rtx
=
6093 gen_raw_REG (Pmode
, VIRTUAL_STACK_DYNAMIC_REGNUM
);
6094 virtual_outgoing_args_rtx
=
6095 gen_raw_REG (Pmode
, VIRTUAL_OUTGOING_ARGS_REGNUM
);
6096 virtual_cfa_rtx
= gen_raw_REG (Pmode
, VIRTUAL_CFA_REGNUM
);
6097 virtual_preferred_stack_boundary_rtx
=
6098 gen_raw_REG (Pmode
, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM
);
6100 /* Initialize RTL for commonly used hard registers. These are
6101 copied into regno_reg_rtx as we begin to compile each function. */
6102 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6103 initial_regno_reg_rtx
[i
] = gen_raw_REG (reg_raw_mode
[i
], i
);
6105 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6106 return_address_pointer_rtx
6107 = gen_raw_REG (Pmode
, RETURN_ADDRESS_POINTER_REGNUM
);
6110 pic_offset_table_rtx
= NULL_RTX
;
6111 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
6112 pic_offset_table_rtx
= gen_raw_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
);
6114 for (i
= 0; i
< (int) MAX_MACHINE_MODE
; i
++)
6116 mode
= (machine_mode
) i
;
6117 attrs
= ggc_cleared_alloc
<mem_attrs
> ();
6118 attrs
->align
= BITS_PER_UNIT
;
6119 attrs
->addrspace
= ADDR_SPACE_GENERIC
;
6120 if (mode
!= BLKmode
)
6122 attrs
->size_known_p
= true;
6123 attrs
->size
= GET_MODE_SIZE (mode
);
6124 if (STRICT_ALIGNMENT
)
6125 attrs
->align
= GET_MODE_ALIGNMENT (mode
);
6127 mode_mem_attrs
[i
] = attrs
;
6130 split_branch_probability
= profile_probability::uninitialized ();
6133 /* Initialize global machine_mode variables. */
6136 init_derived_machine_modes (void)
6138 opt_scalar_int_mode mode_iter
, opt_byte_mode
, opt_word_mode
;
6139 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_INT
)
6141 scalar_int_mode mode
= mode_iter
.require ();
6143 if (GET_MODE_BITSIZE (mode
) == BITS_PER_UNIT
6144 && !opt_byte_mode
.exists ())
6145 opt_byte_mode
= mode
;
6147 if (GET_MODE_BITSIZE (mode
) == BITS_PER_WORD
6148 && !opt_word_mode
.exists ())
6149 opt_word_mode
= mode
;
6152 byte_mode
= opt_byte_mode
.require ();
6153 word_mode
= opt_word_mode
.require ();
6154 ptr_mode
= as_a
<scalar_int_mode
>
6155 (mode_for_size (POINTER_SIZE
, GET_MODE_CLASS (Pmode
), 0).require ());
6158 /* Create some permanent unique rtl objects shared between all functions. */
6161 init_emit_once (void)
6165 scalar_float_mode double_mode
;
6166 opt_scalar_mode smode_iter
;
6168 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6169 CONST_FIXED, and memory attribute hash tables. */
6170 const_int_htab
= hash_table
<const_int_hasher
>::create_ggc (37);
6172 #if TARGET_SUPPORTS_WIDE_INT
6173 const_wide_int_htab
= hash_table
<const_wide_int_hasher
>::create_ggc (37);
6175 const_double_htab
= hash_table
<const_double_hasher
>::create_ggc (37);
6177 if (NUM_POLY_INT_COEFFS
> 1)
6178 const_poly_int_htab
= hash_table
<const_poly_int_hasher
>::create_ggc (37);
6180 const_fixed_htab
= hash_table
<const_fixed_hasher
>::create_ggc (37);
6182 reg_attrs_htab
= hash_table
<reg_attr_hasher
>::create_ggc (37);
6184 #ifdef INIT_EXPANDERS
6185 /* This is to initialize {init|mark|free}_machine_status before the first
6186 call to push_function_context_to. This is needed by the Chill front
6187 end which calls push_function_context_to before the first call to
6188 init_function_start. */
6192 /* Create the unique rtx's for certain rtx codes and operand values. */
6194 /* Process stack-limiting command-line options. */
6195 if (opt_fstack_limit_symbol_arg
!= NULL
)
6197 = gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (opt_fstack_limit_symbol_arg
));
6198 if (opt_fstack_limit_register_no
>= 0)
6199 stack_limit_rtx
= gen_rtx_REG (Pmode
, opt_fstack_limit_register_no
);
6201 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6202 tries to use these variables. */
6203 for (i
= - MAX_SAVED_CONST_INT
; i
<= MAX_SAVED_CONST_INT
; i
++)
6204 const_int_rtx
[i
+ MAX_SAVED_CONST_INT
] =
6205 gen_rtx_raw_CONST_INT (VOIDmode
, (HOST_WIDE_INT
) i
);
6207 if (STORE_FLAG_VALUE
>= - MAX_SAVED_CONST_INT
6208 && STORE_FLAG_VALUE
<= MAX_SAVED_CONST_INT
)
6209 const_true_rtx
= const_int_rtx
[STORE_FLAG_VALUE
+ MAX_SAVED_CONST_INT
];
6211 const_true_rtx
= gen_rtx_CONST_INT (VOIDmode
, STORE_FLAG_VALUE
);
6213 double_mode
= float_mode_for_size (DOUBLE_TYPE_SIZE
).require ();
6215 real_from_integer (&dconst0
, double_mode
, 0, SIGNED
);
6216 real_from_integer (&dconst1
, double_mode
, 1, SIGNED
);
6217 real_from_integer (&dconst2
, double_mode
, 2, SIGNED
);
6222 dconsthalf
= dconst1
;
6223 SET_REAL_EXP (&dconsthalf
, REAL_EXP (&dconsthalf
) - 1);
6225 for (i
= 0; i
< 3; i
++)
6227 const REAL_VALUE_TYPE
*const r
=
6228 (i
== 0 ? &dconst0
: i
== 1 ? &dconst1
: &dconst2
);
6230 FOR_EACH_MODE_IN_CLASS (mode
, MODE_FLOAT
)
6231 const_tiny_rtx
[i
][(int) mode
] =
6232 const_double_from_real_value (*r
, mode
);
6234 FOR_EACH_MODE_IN_CLASS (mode
, MODE_DECIMAL_FLOAT
)
6235 const_tiny_rtx
[i
][(int) mode
] =
6236 const_double_from_real_value (*r
, mode
);
6238 const_tiny_rtx
[i
][(int) VOIDmode
] = GEN_INT (i
);
6240 FOR_EACH_MODE_IN_CLASS (mode
, MODE_INT
)
6241 const_tiny_rtx
[i
][(int) mode
] = GEN_INT (i
);
6243 for (mode
= MIN_MODE_PARTIAL_INT
;
6244 mode
<= MAX_MODE_PARTIAL_INT
;
6245 mode
= (machine_mode
)((int)(mode
) + 1))
6246 const_tiny_rtx
[i
][(int) mode
] = GEN_INT (i
);
6249 const_tiny_rtx
[3][(int) VOIDmode
] = constm1_rtx
;
6251 FOR_EACH_MODE_IN_CLASS (mode
, MODE_INT
)
6252 const_tiny_rtx
[3][(int) mode
] = constm1_rtx
;
6254 for (mode
= MIN_MODE_PARTIAL_INT
;
6255 mode
<= MAX_MODE_PARTIAL_INT
;
6256 mode
= (machine_mode
)((int)(mode
) + 1))
6257 const_tiny_rtx
[3][(int) mode
] = constm1_rtx
;
6259 FOR_EACH_MODE_IN_CLASS (mode
, MODE_COMPLEX_INT
)
6261 rtx inner
= const_tiny_rtx
[0][(int)GET_MODE_INNER (mode
)];
6262 const_tiny_rtx
[0][(int) mode
] = gen_rtx_CONCAT (mode
, inner
, inner
);
6265 FOR_EACH_MODE_IN_CLASS (mode
, MODE_COMPLEX_FLOAT
)
6267 rtx inner
= const_tiny_rtx
[0][(int)GET_MODE_INNER (mode
)];
6268 const_tiny_rtx
[0][(int) mode
] = gen_rtx_CONCAT (mode
, inner
, inner
);
6271 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_INT
)
6273 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6274 const_tiny_rtx
[1][(int) mode
] = gen_const_vector (mode
, 1);
6275 const_tiny_rtx
[3][(int) mode
] = gen_const_vector (mode
, 3);
6278 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_FLOAT
)
6280 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6281 const_tiny_rtx
[1][(int) mode
] = gen_const_vector (mode
, 1);
6284 FOR_EACH_MODE_IN_CLASS (smode_iter
, MODE_FRACT
)
6286 scalar_mode smode
= smode_iter
.require ();
6287 FCONST0 (smode
).data
.high
= 0;
6288 FCONST0 (smode
).data
.low
= 0;
6289 FCONST0 (smode
).mode
= smode
;
6290 const_tiny_rtx
[0][(int) smode
]
6291 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode
), smode
);
6294 FOR_EACH_MODE_IN_CLASS (smode_iter
, MODE_UFRACT
)
6296 scalar_mode smode
= smode_iter
.require ();
6297 FCONST0 (smode
).data
.high
= 0;
6298 FCONST0 (smode
).data
.low
= 0;
6299 FCONST0 (smode
).mode
= smode
;
6300 const_tiny_rtx
[0][(int) smode
]
6301 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode
), smode
);
6304 FOR_EACH_MODE_IN_CLASS (smode_iter
, MODE_ACCUM
)
6306 scalar_mode smode
= smode_iter
.require ();
6307 FCONST0 (smode
).data
.high
= 0;
6308 FCONST0 (smode
).data
.low
= 0;
6309 FCONST0 (smode
).mode
= smode
;
6310 const_tiny_rtx
[0][(int) smode
]
6311 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode
), smode
);
6313 /* We store the value 1. */
6314 FCONST1 (smode
).data
.high
= 0;
6315 FCONST1 (smode
).data
.low
= 0;
6316 FCONST1 (smode
).mode
= smode
;
6317 FCONST1 (smode
).data
6318 = double_int_one
.lshift (GET_MODE_FBIT (smode
),
6319 HOST_BITS_PER_DOUBLE_INT
,
6320 SIGNED_FIXED_POINT_MODE_P (smode
));
6321 const_tiny_rtx
[1][(int) smode
]
6322 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode
), smode
);
6325 FOR_EACH_MODE_IN_CLASS (smode_iter
, MODE_UACCUM
)
6327 scalar_mode smode
= smode_iter
.require ();
6328 FCONST0 (smode
).data
.high
= 0;
6329 FCONST0 (smode
).data
.low
= 0;
6330 FCONST0 (smode
).mode
= smode
;
6331 const_tiny_rtx
[0][(int) smode
]
6332 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode
), smode
);
6334 /* We store the value 1. */
6335 FCONST1 (smode
).data
.high
= 0;
6336 FCONST1 (smode
).data
.low
= 0;
6337 FCONST1 (smode
).mode
= smode
;
6338 FCONST1 (smode
).data
6339 = double_int_one
.lshift (GET_MODE_FBIT (smode
),
6340 HOST_BITS_PER_DOUBLE_INT
,
6341 SIGNED_FIXED_POINT_MODE_P (smode
));
6342 const_tiny_rtx
[1][(int) smode
]
6343 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode
), smode
);
6346 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_FRACT
)
6348 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6351 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_UFRACT
)
6353 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6356 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_ACCUM
)
6358 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6359 const_tiny_rtx
[1][(int) mode
] = gen_const_vector (mode
, 1);
6362 FOR_EACH_MODE_IN_CLASS (mode
, MODE_VECTOR_UACCUM
)
6364 const_tiny_rtx
[0][(int) mode
] = gen_const_vector (mode
, 0);
6365 const_tiny_rtx
[1][(int) mode
] = gen_const_vector (mode
, 1);
6368 for (i
= (int) CCmode
; i
< (int) MAX_MACHINE_MODE
; ++i
)
6369 if (GET_MODE_CLASS ((machine_mode
) i
) == MODE_CC
)
6370 const_tiny_rtx
[0][i
] = const0_rtx
;
6372 const_tiny_rtx
[0][(int) BImode
] = const0_rtx
;
6373 if (STORE_FLAG_VALUE
== 1)
6374 const_tiny_rtx
[1][(int) BImode
] = const1_rtx
;
6376 FOR_EACH_MODE_IN_CLASS (smode_iter
, MODE_POINTER_BOUNDS
)
6378 scalar_mode smode
= smode_iter
.require ();
6379 wide_int wi_zero
= wi::zero (GET_MODE_PRECISION (smode
));
6380 const_tiny_rtx
[0][smode
] = immed_wide_int_const (wi_zero
, smode
);
6383 pc_rtx
= gen_rtx_fmt_ (PC
, VOIDmode
);
6384 ret_rtx
= gen_rtx_fmt_ (RETURN
, VOIDmode
);
6385 simple_return_rtx
= gen_rtx_fmt_ (SIMPLE_RETURN
, VOIDmode
);
6386 cc0_rtx
= gen_rtx_fmt_ (CC0
, VOIDmode
);
6387 invalid_insn_rtx
= gen_rtx_INSN (VOIDmode
,
6391 /*pattern=*/NULL_RTX
,
6394 /*reg_notes=*/NULL_RTX
);
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  /* Emit the copied pattern with the emitter that matches INSN's rtx
     code, so the duplicate gets the same kind of insn rtx.  Per-kind
     flags that copy_insn does not carry over are propagated by hand.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      /* Keep the hot/cold-partition crossing marker on the copy.  */
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      /* Propagate the call-specific flags the RTL analyzers rely on.  */
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  /* The copy recognizes as the same insn code as the original.  */
  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
/* Cache of (clobber (reg ...)) expressions, indexed by machine mode and
   hard register number.  GTY((deletable)) lets the garbage collector
   discard the whole cache at any collection point; entries are then
   simply rebuilt on demand.  */
static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Return a CLOBBER expression for hard register REGNO in mode MODE,
   reusing a cached rtx when one already exists.  Callers must treat the
   result as shared and never modify it in place.  */
rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    /* Create, memoize, and return a fresh clobber of (reg:MODE REGNO).  */
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
/* Locations to attach to prologue and epilogue insns; set lazily by the
   location machinery below (both start out as 0, i.e. unset).  */
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location;
6485 /* Allocate insn location datastructure. */
6487 insn_locations_init (void)
6489 prologue_location
= epilogue_location
= 0;
6490 curr_location
= UNKNOWN_LOCATION
;
/* At the end of emit stage, clear current location.  The last location
   seen during expansion is recorded as the epilogue location before the
   current location is reset.  */

void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}
/* Set current location to LOCATION; subsequently emitted insns pick it
   up as their INSN_LOCATION.  */

void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}
/* Get current location.  */

location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return lexical scope block insn belongs to.  Extracted from the
   insn's location, so insns with UNKNOWN_LOCATION yield no block.  */

tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}
/* Return line number of the statement that produced this insn.  */

int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}
/* Return source file of the statement that produced this insn.  */

const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}
/* Return expanded location of the statement that produced this insn.  */

expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.

   PRE selects which side is being asked about: true for the barrier
   before the atomic operation, false for the one after it.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  /* Mask off MEMMODEL_SYNC and similar extension bits; only the base
     C++11 ordering matters here.  */
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      /* No ordering constraints: never a barrier on either side.  */
      return false;
    case MEMMODEL_RELEASE:
      /* Release ordering needs only the pre-operation barrier.  */
      return pre;
    case MEMMODEL_ACQUIRE:
      /* Acquire ordering needs only the post-operation barrier.  */
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      /* Fully ordered: barriers on both sides.  */
      return true;
    default:
      gcc_unreachable ();
    }
}
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  The mode parameter is currently unused (hence
   unnamed); the shift amount is always emitted in a 64-bit mode.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
/* Initialize fields of rtl_data related to stack alignment.  All
   alignment fields start at the target's STACK_BOUNDARY except the
   estimate, which starts at 0 and is raised as insns are expanded.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}
6595 #include "gt-emit-rtl.h"