1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
38 #include "diagnostic.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
45 #include "insn-attr.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
52 #include "optabs-tree.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
58 #include "tree-ssa-live.h"
59 #include "tree-outof-ssa.h"
60 #include "tree-ssa-address.h"
63 #include "gimple-fold.h"
64 #include "rtx-vector-builder.h"
67 /* If this is nonzero, we do not bother generating VOLATILE
68 around volatile memory references, and we are willing to
69 output indirect addresses. If cse is to follow, we reject
70 indirect addresses so a useful potential cse is generated;
71 if it is used only once, instruction combination will produce
72 the same indirect address eventually. */
75 static bool block_move_libcall_safe_for_call_parm (void);
76 static bool emit_block_move_via_pattern (rtx, rtx, rtx, unsigned, unsigned,
77                                          HOST_WIDE_INT, unsigned HOST_WIDE_INT,
78                                          unsigned HOST_WIDE_INT,
79                                          unsigned HOST_WIDE_INT, bool);
80 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
81 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
82 static rtx_insn *compress_float_constant (rtx, rtx);
83 static rtx get_subtarget (rtx);
84 static void store_constructor (tree, rtx, int, poly_int64, bool);
85 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
86                         machine_mode, tree, alias_set_type, bool, bool);
88 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
90 static int is_aligning_offset (const_tree, const_tree);
91 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
92 static rtx do_store_flag (sepops, rtx, machine_mode);
94 static void emit_single_push_insn (machine_mode, rtx, tree);
96 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
98 static rtx const_vector_from_tree (tree);
99 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
100 static tree tree_expr_size (const_tree);
101 static HOST_WIDE_INT int_expr_size (tree);
102 static void convert_mode_scalar (rtx, rtx, int);
105 /* This is run to set up which modes can be used
106 directly in memory and to initialize the block move optab. It is run
107 at the beginning of compilation and when the target is reinitialized. */
110 init_expr_target (void)
117 /* Try indexing by frame ptr and try by stack ptr.
118 It is known that on the Convex the stack ptr isn't a valid index.
119 With luck, one or the other is valid on any machine. */
120   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
121   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
123 /* A scratch register we can modify in-place below to avoid
124 useless RTL allocations. */
125   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
127   rtx_insn *insn = as_a <rtx_insn *> (rtx_alloc (INSN));
128   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
129   PATTERN (insn) = pat;
131   for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
132        mode = (machine_mode) ((int) mode + 1))
136       direct_load[(int) mode] = direct_store[(int) mode] = 0;
137       PUT_MODE (mem, mode);
138       PUT_MODE (mem1, mode);
140       /* See if there is some register that can be used in this mode and
141          directly loaded or stored from memory. */
143       if (mode != VOIDmode && mode != BLKmode)
144         for (regno = 0; regno < FIRST_PSEUDO_REGISTER
145              && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
148             if (!targetm.hard_regno_mode_ok (regno, mode))
151             set_mode_and_regno (reg, mode, regno);
154             SET_DEST (pat) = reg;
155             if (recog (pat, insn, &num_clobbers) >= 0)
156               direct_load[(int) mode] = 1;
158             SET_SRC (pat) = mem1;
159             SET_DEST (pat) = reg;
160             if (recog (pat, insn, &num_clobbers) >= 0)
161               direct_load[(int) mode] = 1;
164             SET_DEST (pat) = mem;
165             if (recog (pat, insn, &num_clobbers) >= 0)
166               direct_store[(int) mode] = 1;
169             SET_DEST (pat) = mem1;
170             if (recog (pat, insn, &num_clobbers) >= 0)
171               direct_store[(int) mode] = 1;
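  /* At this point direct_load[M] and direct_store[M] record, for each mode M,
     whether the backend recognizes a plain register<->memory move in that
     mode; routines such as convert_move and convert_modes consult them to
     decide when a MEM may be referenced directly in a narrower mode.  */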
175   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
177   opt_scalar_float_mode mode_iter;
178   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
180       scalar_float_mode mode = mode_iter.require ();
181       scalar_float_mode srcmode;
182       FOR_EACH_MODE_UNTIL (srcmode, mode)
186           ic = can_extend_p (mode, srcmode, 0);
187           if (ic == CODE_FOR_nothing)
190           PUT_MODE (mem, srcmode);
192           if (insn_operand_matches (ic, 1, mem))
193             float_extend_from_mem[mode][srcmode] = true;
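  /* float_extend_from_mem[MODE][SRCMODE] is set when the extend insn for this
     mode pair accepts a memory input directly, so that a floating constant
     can be kept in the narrower SRCMODE and extended on load (this is what
     compress_float_constant relies on).  */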
198 /* This is run at the start of compiling a function. */
203   memset (&crtl->expr, 0, sizeof (crtl->expr));
206 /* Copy data from FROM to TO, where the machine modes are not the same.
207 Both modes may be integer, or both may be floating, or both may be
209 UNSIGNEDP should be nonzero if FROM is an unsigned type.
210 This causes zero-extension instead of sign-extension. */
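/* For example (a sketch, not code from this file): with TO a pseudo in
   SImode and FROM a pseudo in QImode, convert_move (to, from, 1) emits a
   zero-extending move, falling back to a wider intermediate mode, a pair
   of shifts, or a libcall when no direct extension pattern exists.  */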
213 convert_move (rtx to, rtx from, int unsignedp)
215   machine_mode to_mode = GET_MODE (to);
216   machine_mode from_mode = GET_MODE (from);
218   gcc_assert (to_mode != BLKmode);
219   gcc_assert (from_mode != BLKmode);
221 /* If the source and destination are already the same, then there's
226 /* If FROM is a SUBREG that indicates that we have already done at least
227 the required extension, strip it. We don't handle such SUBREGs as
230   scalar_int_mode to_int_mode;
231   if (GET_CODE (from) == SUBREG
232       && SUBREG_PROMOTED_VAR_P (from)
233       && is_a <scalar_int_mode> (to_mode, &to_int_mode)
234       && (GET_MODE_PRECISION (subreg_promoted_mode (from))
235           >= GET_MODE_PRECISION (to_int_mode))
236       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
238       from = gen_lowpart (to_int_mode, SUBREG_REG (from));
239       from_mode = to_int_mode;
242   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
244   if (to_mode == from_mode
245       || (from_mode == VOIDmode && CONSTANT_P (from)))
247       emit_move_insn (to, from);
251   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
253       gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
254                             GET_MODE_BITSIZE (to_mode)));
256       if (VECTOR_MODE_P (to_mode))
257         from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
259         to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
261       emit_move_insn (to, from);
265   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
267       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
268       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
272   convert_mode_scalar (to, from, unsignedp);
275 /* Like convert_move, but deals only with scalar modes. */
278 convert_mode_scalar (rtx to, rtx from, int unsignedp)
280   /* Both modes should be scalar types. */
281   scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
282   scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
283   bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
284   bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
288   gcc_assert (to_real == from_real);
290   /* rtx code for making an equivalent value. */
291   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
292                               : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
300       gcc_assert ((GET_MODE_PRECISION (from_mode)
301                    != GET_MODE_PRECISION (to_mode))
302                   || (DECIMAL_FLOAT_MODE_P (from_mode)
303                       != DECIMAL_FLOAT_MODE_P (to_mode)));
305       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
306         /* Conversion between decimal float and binary float, same size. */
307         tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
308       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
313       /* Try converting directly if the insn is supported. */
315       code = convert_optab_handler (tab, to_mode, from_mode);
316       if (code != CODE_FOR_nothing)
318           emit_unop_insn (code, to, from,
319                           tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
323       /* Otherwise use a libcall. */
324       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
326       /* Is this conversion implemented yet? */
327       gcc_assert (libcall);
330       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
332       insns = get_insns ();
334       emit_libcall_block (insns, to, value,
335                           tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
337                           : gen_rtx_FLOAT_EXTEND (to_mode, from));
341 /* Handle pointer conversion. */ /* SPEE 900220. */
342 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
346   if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
353   if (convert_optab_handler (ctab, to_mode, from_mode)
356       emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
362 /* Targets are expected to provide conversion insns between PxImode and
363 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
364   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
366       scalar_int_mode full_mode
367         = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
369       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
370                   != CODE_FOR_nothing);
372       if (full_mode != from_mode)
373         from = convert_to_mode (full_mode, from, unsignedp);
374       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
378   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
381       scalar_int_mode full_mode
382         = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
383       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
384       enum insn_code icode;
386       icode = convert_optab_handler (ctab, full_mode, from_mode);
387       gcc_assert (icode != CODE_FOR_nothing);
389       if (to_mode == full_mode)
391           emit_unop_insn (icode, to, from, UNKNOWN);
395       new_from = gen_reg_rtx (full_mode);
396       emit_unop_insn (icode, new_from, from, UNKNOWN);
398       /* else proceed to integer conversions below. */
399       from_mode = full_mode;
403 /* Make sure both are fixed-point modes or both are not. */
404   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
405               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
406   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
408       /* If we widen from_mode to to_mode and they are in the same class,
409          we won't saturate the result.
410          Otherwise, always saturate the result to play safe. */
411       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
412           && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
413         expand_fixed_convert (to, from, 0, 0);
415         expand_fixed_convert (to, from, 0, 1);
419 /* Now both modes are integers. */
421 /* Handle expanding beyond a word. */
422   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
423       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
430       scalar_mode lowpart_mode;
431       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
433       /* Try converting directly if the insn is supported. */
434       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
437           /* If FROM is a SUBREG, put it into a register.  Do this
438              so that we always generate the same set of insns for
439              better cse'ing; if an intermediate assignment occurred,
440              we won't be doing the operation directly on the SUBREG. */
441           if (optimize > 0 && GET_CODE (from) == SUBREG)
442             from = force_reg (from_mode, from);
443           emit_unop_insn (code, to, from, equiv_code);
446       /* Next, try converting via full word. */
447       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
448                && ((code = can_extend_p (to_mode, word_mode, unsignedp))
449                    != CODE_FOR_nothing))
451           rtx word_to = gen_reg_rtx (word_mode);
454           if (reg_overlap_mentioned_p (to, from))
455             from = force_reg (from_mode, from);
458           convert_move (word_to, from, unsignedp);
459           emit_unop_insn (code, to, word_to, equiv_code);
463       /* No special multiword conversion insn; do it by hand. */
466       /* Since we will turn this into a no conflict block, we must ensure
467          the source does not overlap the target so force it into an isolated
468          register when maybe so.  Likewise for any MEM input, since the
469          conversion sequence might require several references to it and we
470          must ensure we're getting the same value every time. */
472       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
473         from = force_reg (from_mode, from);
475       /* Get a copy of FROM widened to a word, if necessary. */
476       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
477         lowpart_mode = word_mode;
479         lowpart_mode = from_mode;
481       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
483       lowpart = gen_lowpart (lowpart_mode, to);
484       emit_move_insn (lowpart, lowfrom);
486       /* Compute the value to put in each remaining word. */
488         fill_value = const0_rtx;
490         fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
491                                             LT, lowfrom, const0_rtx,
492                                             lowpart_mode, 0, -1);
494       /* Fill the remaining words. */
495       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
497           int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
498           rtx subword = operand_subword (to, index, 1, to_mode);
500           gcc_assert (subword);
502           if (fill_value != subword)
503             emit_move_insn (subword, fill_value);
506       insns = get_insns ();
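      /* The remaining high-order words receive FILL_VALUE: zero for an
         unsigned extension, and for a signed one the all-ones/zero word
         produced by the (lowpart < 0) store-flag above, i.e. a by-hand
         sign extension.  */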
513 /* Truncating multi-word to a word or less. */
514   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
515       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
518           && ! MEM_VOLATILE_P (from)
519           && direct_load[(int) to_mode]
520           && ! mode_dependent_address_p (XEXP (from, 0),
521                                          MEM_ADDR_SPACE (from)))
523           || GET_CODE (from) == SUBREG))
524         from = force_reg (from_mode, from);
525       convert_move (to, gen_lowpart (word_mode, from), 0);
529 /* Now follow all the conversions between integers
530 no more than a word long. */
532 /* For truncation, usually we can just refer to FROM in a narrower mode. */
533   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
534       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
537           && ! MEM_VOLATILE_P (from)
538           && direct_load[(int) to_mode]
539           && ! mode_dependent_address_p (XEXP (from, 0),
540                                          MEM_ADDR_SPACE (from)))
542           || GET_CODE (from) == SUBREG))
543         from = force_reg (from_mode, from);
544       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
545           && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
546         from = copy_to_reg (from);
547       emit_move_insn (to, gen_lowpart (to_mode, from));
551 /* Handle extension. */
552   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
554       /* Convert directly if that works. */
555       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
558           emit_unop_insn (code, to, from, equiv_code);
563           scalar_mode intermediate;
567           /* Search for a mode to convert via. */
568           opt_scalar_mode intermediate_iter;
569           FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
571               scalar_mode intermediate = intermediate_iter.require ();
572               if (((can_extend_p (to_mode, intermediate, unsignedp)
574                     || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
575                         && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
577                    && (can_extend_p (intermediate, from_mode, unsignedp)
578                        != CODE_FOR_nothing))
580                   convert_move (to, convert_to_mode (intermediate, from,
581                                                      unsignedp), unsignedp);
586           /* No suitable intermediate mode.
587              Generate what we need with shifts. */
588           shift_amount = (GET_MODE_PRECISION (to_mode)
589                           - GET_MODE_PRECISION (from_mode));
590           from = gen_lowpart (to_mode, force_reg (from_mode, from));
591           tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
593           tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
596           emit_move_insn (to, tmp);
601 /* Support special truncate insns for certain modes. */
602   if (convert_optab_handler (trunc_optab, to_mode,
603                              from_mode) != CODE_FOR_nothing)
605       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
610 /* Handle truncation of volatile memrefs, and so on;
611 the things that couldn't be truncated directly,
612 and for which there was no special instruction.
614 ??? Code above formerly short-circuited this, for most integer
615 mode pairs, with a force_reg in from_mode followed by a recursive
616 call to this routine. Appears always to have been wrong. */
617   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
619       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
620       emit_move_insn (to, temp);
624 /* Mode combination is not recognized. */
628 /* Return an rtx for a value that would result
629 from converting X to mode MODE.
630 Both X and MODE may be floating, or both integer.
631 UNSIGNEDP is nonzero if X is an unsigned value.
632 This can be done by referring to a part of X in place
633 or by copying to a new temporary with conversion. */
636 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
638   return convert_modes (mode, VOIDmode, x, unsignedp);
641 /* Return an rtx for a value that would result
642 from converting X from mode OLDMODE to mode MODE.
643 Both modes may be floating, or both integer.
644 UNSIGNEDP is nonzero if X is an unsigned value.
646 This can be done by referring to a part of X in place
647 or by copying to a new temporary with conversion.
649 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
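/* For example (a sketch, not code from this file):
     rtx r = convert_modes (SImode, QImode, x, 1);
   yields an SImode rtx equal to X zero-extended from QImode, either by
   reusing a low part of X in place or by emitting conversion insns into
   a fresh pseudo.  */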
652 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
655   scalar_int_mode int_mode;
657   /* If FROM is a SUBREG that indicates that we have already done at least
658      the required extension, strip it. */
660   if (GET_CODE (x) == SUBREG
661       && SUBREG_PROMOTED_VAR_P (x)
662       && is_a <scalar_int_mode> (mode, &int_mode)
663       && (GET_MODE_PRECISION (subreg_promoted_mode (x))
664           >= GET_MODE_PRECISION (int_mode))
665       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
666     x = gen_lowpart (int_mode, SUBREG_REG (x));
668   if (GET_MODE (x) != VOIDmode)
669     oldmode = GET_MODE (x);
674   if (CONST_SCALAR_INT_P (x)
675       && is_int_mode (mode, &int_mode))
677       /* If the caller did not tell us the old mode, then there is not
678          much to do with respect to canonicalization.  We have to
679          assume that all the bits are significant. */
680       if (GET_MODE_CLASS (oldmode) != MODE_INT)
681         oldmode = MAX_MODE_INT;
682       wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
683                                    GET_MODE_PRECISION (int_mode),
684                                    unsignedp ? UNSIGNED : SIGNED);
685       return immed_wide_int_const (w, int_mode);
688   /* We can do this with a gen_lowpart if both desired and current modes
689      are integer, and this is either a constant integer, a register, or a
691   scalar_int_mode int_oldmode;
692   if (is_int_mode (mode, &int_mode)
693       && is_int_mode (oldmode, &int_oldmode)
694       && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
695       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
696           || CONST_POLY_INT_P (x)
698               && (!HARD_REGISTER_P (x)
699                   || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
700               && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
701     return gen_lowpart (int_mode, x);
703 /* Converting from integer constant into mode is always equivalent to an
705   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
707       gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
708                             GET_MODE_BITSIZE (oldmode)));
709       return simplify_gen_subreg (mode, x, oldmode, 0);
712   temp = gen_reg_rtx (mode);
713   convert_move (temp, x, unsignedp);
717 /* Return the largest alignment we can use for doing a move (or store)
718 of MAX_PIECES. ALIGN is the largest alignment we could use. */
721 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
723   scalar_int_mode tmode
724     = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
726   if (align >= GET_MODE_ALIGNMENT (tmode))
727     align = GET_MODE_ALIGNMENT (tmode);
730       scalar_int_mode xmode = NARROWEST_INT_MODE;
731       opt_scalar_int_mode mode_iter;
732       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
734           tmode = mode_iter.require ();
735           if (GET_MODE_SIZE (tmode) > max_pieces
736               || targetm.slow_unaligned_access (tmode, align))
741       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
747 /* Return the widest integer mode that is narrower than SIZE bytes. */
749 static scalar_int_mode
750 widest_int_mode_for_size (unsigned int size)
752   scalar_int_mode result = NARROWEST_INT_MODE;
754   gcc_checking_assert (size > 1);
756   opt_scalar_int_mode tmode;
757   FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
758     if (GET_MODE_SIZE (tmode.require ()) < size)
759       result = tmode.require ();
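/* E.g. on a target whose integer modes are QI/HI/SI/DImode, a SIZE of 5
   yields SImode (4 bytes), the widest mode strictly narrower than 5 bytes,
   and a SIZE of 16 yields DImode.  */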
764 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
765 and should be performed piecewise. */
768 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
769                   enum by_pieces_operation op)
771   return targetm.use_by_pieces_infrastructure_p (len, align, op,
772                                                  optimize_insn_for_speed_p ());
775 /* Determine whether the LEN bytes can be moved by using several move
776 instructions. Return nonzero if a call to move_by_pieces should
780 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
782   return can_do_by_pieces (len, align, MOVE_BY_PIECES);
785 /* Return number of insns required to perform operation OP by pieces
786 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
788 unsigned HOST_WIDE_INT
789 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
790                   unsigned int max_size, by_pieces_operation op)
792   unsigned HOST_WIDE_INT n_insns = 0;
794   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
796   while (max_size > 1 && l > 0)
798       scalar_int_mode mode = widest_int_mode_for_size (max_size);
799       enum insn_code icode;
801       unsigned int modesize = GET_MODE_SIZE (mode);
803       icode = optab_handler (mov_optab, mode);
804       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
806           unsigned HOST_WIDE_INT n_pieces = l / modesize;
814             case COMPARE_BY_PIECES:
815               int batch = targetm.compare_by_pieces_branch_ratio (mode);
816               int batch_ops = 4 * batch - 1;
817               unsigned HOST_WIDE_INT full = n_pieces / batch;
818               n_insns += full * batch_ops;
819               if (n_pieces % batch != 0)
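              /* For COMPARE_BY_PIECES the estimate assumes roughly two loads,
                 a subtract and an accumulating IOR per piece, with one branch
                 per BATCH pieces (see compare_by_pieces_d::generate below),
                 hence the 4 * batch - 1 insns charged per full batch.  */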
832 /* Used when performing piecewise block operations, holds information
833 about one of the memory objects involved. The member functions
834 can be used to generate code for loading from the object and
835 updating the address when iterating. */
839 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
842 /* The address of the object. Can differ from that seen in the
843 MEM rtx if we copied the address to a register. */
845 /* Nonzero if the address on the object has an autoincrement already,
846 signifies whether that was an increment or decrement. */
847   signed char m_addr_inc;
848 /* Nonzero if we intend to use autoinc without the address already
849 having autoinc form. We will insert add insns around each memory
850 reference, expecting later passes to form autoinc addressing modes.
851 The only supported options are predecrement and postincrement. */
852   signed char m_explicit_inc;
853 /* True if we have either of the two possible cases of using
856 /* True if this is an address to be used for load operations rather
860 /* Optionally, a function to obtain constants for any given offset into
861 the objects, and data associated with it. */
862   by_pieces_constfn m_constfn;
865   pieces_addr (rtx, bool, by_pieces_constfn, void *);
866   rtx adjust (scalar_int_mode, HOST_WIDE_INT);
867   void increment_address (HOST_WIDE_INT);
868   void maybe_predec (HOST_WIDE_INT);
869   void maybe_postinc (HOST_WIDE_INT);
870   void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
877 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
878 true if the operation to be performed on this object is a load
879 rather than a store. For stores, OBJ can be NULL, in which case we
880 assume the operation is a stack push. For loads, the optional
881 CONSTFN and its associated CFNDATA can be used in place of the
884 pieces_addr::pieces_addr (rtx obj
, bool is_load
, by_pieces_constfn constfn
,
886 : m_obj (obj
), m_is_load (is_load
), m_constfn (constfn
), m_cfndata (cfndata
)
892 rtx addr
= XEXP (obj
, 0);
893 rtx_code code
= GET_CODE (addr
);
895 bool dec
= code
== PRE_DEC
|| code
== POST_DEC
;
896 bool inc
= code
== PRE_INC
|| code
== POST_INC
;
899 m_addr_inc
= dec
? -1 : 1;
901 /* While we have always looked for these codes here, the code
902 implementing the memory operation has never handled them.
903 Support could be added later if necessary or beneficial. */
904 gcc_assert (code
!= PRE_INC
&& code
!= POST_DEC
);
912 if (STACK_GROWS_DOWNWARD
)
918 gcc_assert (constfn
!= NULL
);
922 gcc_assert (is_load
);
925 /* Decide whether to use autoinc for an address involved in a memory op.
926 MODE is the mode of the accesses, REVERSE is true if we've decided to
927 perform the operation starting from the end, and LEN is the length of
928 the operation. Don't override an earlier decision to set m_auto. */
931 pieces_addr::decide_autoinc (machine_mode
ARG_UNUSED (mode
), bool reverse
,
934 if (m_auto
|| m_obj
== NULL_RTX
)
937 bool use_predec
= (m_is_load
938 ? USE_LOAD_PRE_DECREMENT (mode
)
939 : USE_STORE_PRE_DECREMENT (mode
));
940 bool use_postinc
= (m_is_load
941 ? USE_LOAD_POST_INCREMENT (mode
)
942 : USE_STORE_POST_INCREMENT (mode
));
943 machine_mode addr_mode
= get_address_mode (m_obj
);
945 if (use_predec
&& reverse
)
947 m_addr
= copy_to_mode_reg (addr_mode
,
948 plus_constant (addr_mode
,
953 else if (use_postinc
&& !reverse
)
955 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
959 else if (CONSTANT_P (m_addr
))
960 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
963 /* Adjust the address to refer to the data at OFFSET in MODE. If we
964 are using autoincrement for this address, we don't add the offset,
965 but we still modify the MEM's properties. */
968 pieces_addr::adjust (scalar_int_mode mode
, HOST_WIDE_INT offset
)
971 return m_constfn (m_cfndata
, offset
, mode
);
972 if (m_obj
== NULL_RTX
)
975 return adjust_automodify_address (m_obj
, mode
, m_addr
, offset
);
977 return adjust_address (m_obj
, mode
, offset
);
980 /* Emit an add instruction to increment the address by SIZE. */
983 pieces_addr::increment_address (HOST_WIDE_INT size
)
985 rtx amount
= gen_int_mode (size
, GET_MODE (m_addr
));
986 emit_insn (gen_add2_insn (m_addr
, amount
));
989 /* If we are supposed to decrement the address after each access, emit code
990 to do so now. Increment by SIZE (which should have the correct sign
994 pieces_addr::maybe_predec (HOST_WIDE_INT size
)
996 if (m_explicit_inc
>= 0)
998 gcc_assert (HAVE_PRE_DECREMENT
);
999 increment_address (size
);
1002 /* If we are supposed to increment the address after each access, emit code
1003 to do so now. Increment by SIZE. */
1006 pieces_addr::maybe_postinc (HOST_WIDE_INT size
)
1008 if (m_explicit_inc
<= 0)
1010 gcc_assert (HAVE_POST_INCREMENT
);
1011 increment_address (size
);
1014 /* This structure is used by do_op_by_pieces to describe the operation
1017 class op_by_pieces_d
1020   pieces_addr m_to, m_from;
1021   unsigned HOST_WIDE_INT m_len;
1022   HOST_WIDE_INT m_offset;
1023   unsigned int m_align;
1024   unsigned int m_max_size;
1027   /* Virtual functions, overridden by derived classes for the specific
1029   virtual void generate (rtx, rtx, machine_mode) = 0;
1030   virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1031   virtual void finish_mode (machine_mode)
1036   op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1037                   unsigned HOST_WIDE_INT, unsigned int);
1041 /* The constructor for an op_by_pieces_d structure. We require two
1042 objects named TO and FROM, which are identified as loads or stores
1043 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1044 and its associated FROM_CFN_DATA can be used to replace loads with
1045 constant values. LEN describes the length of the operation. */
1047 op_by_pieces_d::op_by_pieces_d (rtx to
, bool to_load
,
1048 rtx from
, bool from_load
,
1049 by_pieces_constfn from_cfn
,
1050 void *from_cfn_data
,
1051 unsigned HOST_WIDE_INT len
,
1053 : m_to (to
, to_load
, NULL
, NULL
),
1054 m_from (from
, from_load
, from_cfn
, from_cfn_data
),
1055 m_len (len
), m_max_size (MOVE_MAX_PIECES
+ 1)
1057 int toi
= m_to
.get_addr_inc ();
1058 int fromi
= m_from
.get_addr_inc ();
1059 if (toi
>= 0 && fromi
>= 0)
1061 else if (toi
<= 0 && fromi
<= 0)
1066 m_offset
= m_reverse
? len
: 0;
1067 align
= MIN (to
? MEM_ALIGN (to
) : align
,
1068 from
? MEM_ALIGN (from
) : align
);
1070 /* If copying requires more than two move insns,
1071 copy addresses to registers (to make displacements shorter)
1072 and use post-increment if available. */
1073 if (by_pieces_ninsns (len
, align
, m_max_size
, MOVE_BY_PIECES
) > 2)
1075 /* Find the mode of the largest comparison. */
1076 scalar_int_mode mode
= widest_int_mode_for_size (m_max_size
);
1078 m_from
.decide_autoinc (mode
, m_reverse
, len
);
1079 m_to
.decide_autoinc (mode
, m_reverse
, len
);
1082 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
1086 /* This function contains the main loop used for expanding a block
1087 operation. First move what we can in the largest integer mode,
1088 then go to successively smaller modes. For every access, call
1089 GENFUN with the two operands and the EXTRA_DATA. */
1092 op_by_pieces_d::run ()
1094   while (m_max_size > 1 && m_len > 0)
1096       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1098       if (prepare_mode (mode, m_align))
1100           unsigned int size = GET_MODE_SIZE (mode);
1101           rtx to1 = NULL_RTX, from1;
1103           while (m_len >= size)
1108               to1 = m_to.adjust (mode, m_offset);
1109               from1 = m_from.adjust (mode, m_offset);
1111               m_to.maybe_predec (-(HOST_WIDE_INT)size);
1112               m_from.maybe_predec (-(HOST_WIDE_INT)size);
1114               generate (to1, from1, mode);
1116               m_to.maybe_postinc (size);
1117               m_from.maybe_postinc (size);
1128       m_max_size = GET_MODE_SIZE (mode);
1131   /* The code above should have handled everything. */
1132   gcc_assert (!m_len);
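/* For example, with MOVE_MAX_PIECES == 8 and m_len == 11 the loop emits one
   DImode access, then one HImode and one QImode access for the remaining
   3 bytes, provided prepare_mode accepts each mode at the given alignment.  */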
1135 /* Derived class from op_by_pieces_d, providing support for block move
1138 class move_by_pieces_d : public op_by_pieces_d
1140   insn_gen_fn m_gen_fun;
1141   void generate (rtx, rtx, machine_mode);
1142   bool prepare_mode (machine_mode, unsigned int);
1145   move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1147     : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1150   rtx finish_retmode (memop_ret);
1153 /* Return true if MODE can be used for a set of copies, given an
1154 alignment ALIGN. Prepare whatever data is necessary for later
1155 calls to generate. */
1158 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1160   insn_code icode = optab_handler (mov_optab, mode);
1161   m_gen_fun = GEN_FCN (icode);
1162   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1165 /* A callback used when iterating for a move_by_pieces operation.
1166    OP0 and OP1 are the destination and source pieces that have been
1167    prepared for one access in MODE.  If OP0 is NULL, this means we should
1168    generate a push of OP1; otherwise the move insn built by the gen
1169    function chosen in prepare_mode is emitted. */
1172 move_by_pieces_d::generate (rtx op0, rtx op1,
1173                             machine_mode mode ATTRIBUTE_UNUSED)
1175 #ifdef PUSH_ROUNDING
1176   if (op0 == NULL_RTX)
1178       emit_single_push_insn (mode, op1, NULL);
1182   emit_insn (m_gen_fun (op0, op1));
1185 /* Perform the final adjustment at the end of a string to obtain the
1186 correct return value for the block operation.
1187 Return value is based on RETMODE argument. */
1190 move_by_pieces_d::finish_retmode (memop_ret retmode)
1192   gcc_assert (!m_reverse);
1193   if (retmode == RETURN_END_MINUS_ONE)
1195       m_to.maybe_postinc (-1);
1198   return m_to.adjust (QImode, m_offset);
1201 /* Generate several move instructions to copy LEN bytes from block FROM to
1202 block TO. (These are MEM rtx's with BLKmode).
1204 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1205 used to push FROM to the stack.
1207 ALIGN is maximum stack alignment we can assume.
1209 Return value is based on RETMODE argument. */
1212 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1213                 unsigned int align, memop_ret retmode)
1215 #ifndef PUSH_ROUNDING
1220   move_by_pieces_d data (to, from, len, align);
1224   if (retmode != RETURN_BEGIN)
1225     return data.finish_retmode (retmode);
1230 /* Derived class from op_by_pieces_d, providing support for memory store
1233 class store_by_pieces_d
: public op_by_pieces_d
1235 insn_gen_fn m_gen_fun
;
1236 void generate (rtx
, rtx
, machine_mode
);
1237 bool prepare_mode (machine_mode
, unsigned int);
1240 store_by_pieces_d (rtx to
, by_pieces_constfn cfn
, void *cfn_data
,
1241 unsigned HOST_WIDE_INT len
, unsigned int align
)
1242 : op_by_pieces_d (to
, false, NULL_RTX
, true, cfn
, cfn_data
, len
, align
)
1245 rtx
finish_retmode (memop_ret
);
1248 /* Return true if MODE can be used for a set of stores, given an
1249 alignment ALIGN. Prepare whatever data is necessary for later
1250 calls to generate. */
1253 store_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1255 insn_code icode
= optab_handler (mov_optab
, mode
);
1256 m_gen_fun
= GEN_FCN (icode
);
1257 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1260 /* A callback used when iterating for a store_by_pieces operation.
1261    OP0 is the destination piece and OP1 the constant value to be stored
1262    in MODE; the store insn built by the gen function chosen in
1263    prepare_mode is emitted. */
1267 store_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode
)
1269 emit_insn (m_gen_fun (op0
, op1
));
1272 /* Perform the final adjustment at the end of a string to obtain the
1273 correct return value for the block operation.
1274 Return value is based on RETMODE argument. */
1277 store_by_pieces_d::finish_retmode (memop_ret retmode
)
1279 gcc_assert (!m_reverse
);
1280 if (retmode
== RETURN_END_MINUS_ONE
)
1282 m_to
.maybe_postinc (-1);
1285 return m_to
.adjust (QImode
, m_offset
);
1288 /* Determine whether the LEN bytes generated by CONSTFUN can be
1289 stored to memory using several move instructions. CONSTFUNDATA is
1290 a pointer which will be passed as argument in every CONSTFUN call.
1291 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1292 a memset operation and false if it's a copy of a constant string.
1293 Return nonzero if a call to store_by_pieces should succeed. */
1296 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
1297 rtx (*constfun
) (void *, HOST_WIDE_INT
, scalar_int_mode
),
1298 void *constfundata
, unsigned int align
, bool memsetp
)
1300 unsigned HOST_WIDE_INT l
;
1301 unsigned int max_size
;
1302 HOST_WIDE_INT offset
= 0;
1303 enum insn_code icode
;
1305 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1306 rtx cst ATTRIBUTE_UNUSED
;
1311 if (!targetm
.use_by_pieces_infrastructure_p (len
, align
,
1315 optimize_insn_for_speed_p ()))
1318 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
1320 /* We would first store what we can in the largest integer mode, then go to
1321 successively smaller modes. */
1324 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
1328 max_size
= STORE_MAX_PIECES
+ 1;
1329 while (max_size
> 1 && l
> 0)
1331 scalar_int_mode mode
= widest_int_mode_for_size (max_size
);
1333 icode
= optab_handler (mov_optab
, mode
);
1334 if (icode
!= CODE_FOR_nothing
1335 && align
>= GET_MODE_ALIGNMENT (mode
))
1337 unsigned int size
= GET_MODE_SIZE (mode
);
1344 cst
= (*constfun
) (constfundata
, offset
, mode
);
1345 if (!targetm
.legitimate_constant_p (mode
, cst
))
1355 max_size
= GET_MODE_SIZE (mode
);
1358 /* The code above should have handled everything. */
1365 /* Generate several move instructions to store LEN bytes generated by
1366 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1367 pointer which will be passed as argument in every CONSTFUN call.
1368 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1369 a memset operation and false if it's a copy of a constant string.
1370 Return value is based on RETMODE argument. */
1373 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
1374 rtx (*constfun
) (void *, HOST_WIDE_INT
, scalar_int_mode
),
1375 void *constfundata
, unsigned int align
, bool memsetp
,
1380 gcc_assert (retmode
!= RETURN_END_MINUS_ONE
);
1384 gcc_assert (targetm
.use_by_pieces_infrastructure_p
1386 memsetp
? SET_BY_PIECES
: STORE_BY_PIECES
,
1387 optimize_insn_for_speed_p ()));
1389 store_by_pieces_d
data (to
, constfun
, constfundata
, len
, align
);
1392 if (retmode
!= RETURN_BEGIN
)
1393 return data
.finish_retmode (retmode
);
1398 /* Callback routine for clear_by_pieces.
1399 Return const0_rtx unconditionally. */
1402 clear_by_pieces_1 (void *, HOST_WIDE_INT
, scalar_int_mode
)
1407 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1408 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1411 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
1416 store_by_pieces_d
data (to
, clear_by_pieces_1
, NULL
, len
, align
);
1420 /* Context used by compare_by_pieces_genfn. It stores the fail label
1421 to jump to in case of miscomparison, and for branch ratios greater than 1,
1422 it stores an accumulator and the current and maximum counts before
1423 emitting another branch. */
1425 class compare_by_pieces_d
: public op_by_pieces_d
1427 rtx_code_label
*m_fail_label
;
1429 int m_count
, m_batch
;
1431 void generate (rtx
, rtx
, machine_mode
);
1432 bool prepare_mode (machine_mode
, unsigned int);
1433 void finish_mode (machine_mode
);
1435 compare_by_pieces_d (rtx op0
, rtx op1
, by_pieces_constfn op1_cfn
,
1436 void *op1_cfn_data
, HOST_WIDE_INT len
, int align
,
1437 rtx_code_label
*fail_label
)
1438 : op_by_pieces_d (op0
, true, op1
, true, op1_cfn
, op1_cfn_data
, len
, align
)
1440 m_fail_label
= fail_label
;
1444 /* A callback used when iterating for a compare_by_pieces_operation.
1445 OP0 and OP1 are the values that have been loaded and should be
1446 compared in MODE. DATA holds a pointer to the compare_by_pieces_data
1447 context structure. */
1450 compare_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode mode
)
1454 rtx temp
= expand_binop (mode
, sub_optab
, op0
, op1
, NULL_RTX
,
1455 true, OPTAB_LIB_WIDEN
);
1457 temp
= expand_binop (mode
, ior_optab
, m_accumulator
, temp
, temp
,
1458 true, OPTAB_LIB_WIDEN
);
1459 m_accumulator
= temp
;
1461 if (++m_count
< m_batch
)
1465 op0
= m_accumulator
;
1467 m_accumulator
= NULL_RTX
;
1469 do_compare_rtx_and_jump (op0
, op1
, NE
, true, mode
, NULL_RTX
, NULL
,
1470 m_fail_label
, profile_probability::uninitialized ());
1473 /* Return true if MODE can be used for a set of moves and comparisons,
1474 given an alignment ALIGN. Prepare whatever data is necessary for
1475 later calls to generate. */
1478 compare_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1480 insn_code icode
= optab_handler (mov_optab
, mode
);
1481 if (icode
== CODE_FOR_nothing
1482 || align
< GET_MODE_ALIGNMENT (mode
)
1483 || !can_compare_p (EQ
, mode
, ccp_jump
))
1485 m_batch
= targetm
.compare_by_pieces_branch_ratio (mode
);
1488 m_accumulator
= NULL_RTX
;
1493 /* Called after expanding a series of comparisons in MODE. If we have
1494 accumulated results for which we haven't emitted a branch yet, do
1498 compare_by_pieces_d::finish_mode (machine_mode mode
)
1500 if (m_accumulator
!= NULL_RTX
)
1501 do_compare_rtx_and_jump (m_accumulator
, const0_rtx
, NE
, true, mode
,
1502 NULL_RTX
, NULL
, m_fail_label
,
1503 profile_probability::uninitialized ());
1506 /* Generate several move instructions to compare LEN bytes from blocks
1507 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1509 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1510 used to push FROM to the stack.
1512 ALIGN is maximum stack alignment we can assume.
1514 Optionally, the caller can pass a constfn and associated data in A1_CFN
1515 and A1_CFN_DATA, describing that the second operand being compared is a
1516 known constant and how to obtain its data. */
1519 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1520                    rtx target, unsigned int align,
1521                    by_pieces_constfn a1_cfn, void *a1_cfn_data)
1523   rtx_code_label *fail_label = gen_label_rtx ();
1524   rtx_code_label *end_label = gen_label_rtx ();
1526   if (target == NULL_RTX
1527       || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1528     target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1530   compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1535   emit_move_insn (target, const0_rtx);
1536   emit_jump (end_label);
1538   emit_label (fail_label);
1539   emit_move_insn (target, const1_rtx);
1540   emit_label (end_label);
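  /* TARGET ends up holding 0 when every piece compared equal and 1 when any
     piece differed, so the result is only meaningful for equality tests,
     not as the signed tri-state value of a full memcmp.  */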
1545 /* Emit code to move a block Y to a block X. This may be done with
1546 string-move instructions, with multiple scalar move instructions,
1547 or with a library call.
1549 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1550 SIZE is an rtx that says how long they are.
1551 ALIGN is the maximum alignment we can assume they have.
1552 METHOD describes what kind of copy this is, and what mechanisms may be used.
1553 MIN_SIZE is the minimal size of block to move
1554 MAX_SIZE is the maximal size of block to move, if it cannot be represented
1555 in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1557 Return the address of the new block, if memcpy is called and returns it,
1561 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1562 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1563 unsigned HOST_WIDE_INT min_size
,
1564 unsigned HOST_WIDE_INT max_size
,
1565 unsigned HOST_WIDE_INT probable_max_size
,
1566 bool bail_out_libcall
, bool *is_move_done
,
1574 *is_move_done
= true;
1577 if (CONST_INT_P (size
) && INTVAL (size
) == 0)
1582 case BLOCK_OP_NORMAL
:
1583 case BLOCK_OP_TAILCALL
:
1587 case BLOCK_OP_CALL_PARM
:
1588 may_use_call
= block_move_libcall_safe_for_call_parm ();
1590 /* Make inhibit_defer_pop nonzero around the library call
1591 to force it to pop the arguments right away. */
1595 case BLOCK_OP_NO_LIBCALL
:
1599 case BLOCK_OP_NO_LIBCALL_RET
:
1607 gcc_assert (MEM_P (x
) && MEM_P (y
));
1608 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1609 gcc_assert (align
>= BITS_PER_UNIT
);
1611 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1612 block copy is more efficient for other large modes, e.g. DCmode. */
1613 x
= adjust_address (x
, BLKmode
, 0);
1614 y
= adjust_address (y
, BLKmode
, 0);
1616 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1617 can be incorrect is coming from __builtin_memcpy. */
1618 poly_int64 const_size
;
1619 if (poly_int_rtx_p (size
, &const_size
))
1621 x
= shallow_copy_rtx (x
);
1622 y
= shallow_copy_rtx (y
);
1623 set_mem_size (x
, const_size
);
1624 set_mem_size (y
, const_size
);
1627 bool pieces_ok
= false;
1628 if (CONST_INT_P (size
))
1629 pieces_ok
= can_move_by_pieces (INTVAL (size
), align
);
1630 bool pattern_ok
= false;
1632 if (!pieces_ok
|| might_overlap
)
1635 = emit_block_move_via_pattern (x
, y
, size
, align
,
1636 expected_align
, expected_size
,
1637 min_size
, max_size
, probable_max_size
,
1639 if (!pattern_ok
&& might_overlap
)
1641 /* Do not try any of the other methods below as they are not safe
1642 for overlapping moves. */
1643 *is_move_done
= false;
1651 move_by_pieces (x
, y
, INTVAL (size
), align
, RETURN_BEGIN
);
1652 else if (may_use_call
&& !might_overlap
1653 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1654 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1656 if (bail_out_libcall
)
1659 *is_move_done
= false;
1663 if (may_use_call
< 0)
1666 retval
= emit_block_copy_via_libcall (x
, y
, size
,
1667 method
== BLOCK_OP_TAILCALL
);
1669 else if (might_overlap
)
1670 *is_move_done
= false;
1672 emit_block_move_via_loop (x
, y
, size
, align
);
1674 if (method
== BLOCK_OP_CALL_PARM
)
1681 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1683   unsigned HOST_WIDE_INT max, min = 0;
1684   if (GET_CODE (size) == CONST_INT)
1685     min = max = UINTVAL (size);
1687     max = GET_MODE_MASK (GET_MODE (size));
1688   return emit_block_move_hints (x, y, size, method, 0, -1,
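/* Typical use (a sketch, not code from this file):
     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);
   copies 32 bytes, letting emit_block_move_hints choose between
   move_by_pieces, a cpymem pattern, or a memcpy libcall.  */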
1692 /* A subroutine of emit_block_move. Returns true if calling the
1693 block move libcall will not clobber any parameters which may have
1694 already been placed on the stack. */
1697 block_move_libcall_safe_for_call_parm (void)
1699 #if defined (REG_PARM_STACK_SPACE)
1703 /* If arguments are pushed on the stack, then they're safe. */
1707 /* If registers go on the stack anyway, any argument is sure to clobber
1708 an outgoing argument. */
1709 #if defined (REG_PARM_STACK_SPACE)
1710 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1711 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1712 depend on its argument. */
1714 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1715 && REG_PARM_STACK_SPACE (fn
) != 0)
1719 /* If any argument goes in memory, then it might clobber an outgoing
1722 CUMULATIVE_ARGS args_so_far_v
;
1723 cumulative_args_t args_so_far
;
1726 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1727 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1728 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1730 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1731 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1733 machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1734 function_arg_info
arg_info (mode
, /*named=*/true);
1735 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, arg_info
);
1736 if (!tmp
|| !REG_P (tmp
))
1738 if (targetm
.calls
.arg_partial_bytes (args_so_far
, arg_info
))
1740 targetm
.calls
.function_arg_advance (args_so_far
, arg_info
);
1746 /* A subroutine of emit_block_move. Expand a cpymem or movmem pattern;
1747 return true if successful.
1749 X is the destination of the copy or move.
1750 Y is the source of the copy or move.
1751 SIZE is the size of the block to be moved.
1753 MIGHT_OVERLAP indicates this originated with expansion of a
1754 builtin_memmove() and the source and destination blocks may
1759 emit_block_move_via_pattern (rtx x
, rtx y
, rtx size
, unsigned int align
,
1760 unsigned int expected_align
,
1761 HOST_WIDE_INT expected_size
,
1762 unsigned HOST_WIDE_INT min_size
,
1763 unsigned HOST_WIDE_INT max_size
,
1764 unsigned HOST_WIDE_INT probable_max_size
,
1767 if (expected_align
< align
)
1768 expected_align
= align
;
1769 if (expected_size
!= -1)
1771 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1772 expected_size
= probable_max_size
;
1773 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1774 expected_size
= min_size
;
1777 /* Since this is a move insn, we don't care about volatility. */
1778 temporary_volatile_ok
v (true);
1780 /* Try the most limited insn first, because there's no point
1781 including more than one in the machine description unless
1782 the more limited one has some advantage. */
1784 opt_scalar_int_mode mode_iter
;
1785 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_INT
)
1787 scalar_int_mode mode
= mode_iter
.require ();
1788 enum insn_code code
;
1790 code
= direct_optab_handler (movmem_optab
, mode
);
1792 code
= direct_optab_handler (cpymem_optab
, mode
);
1794 if (code
!= CODE_FOR_nothing
1795 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1796 here because if SIZE is less than the mode mask, as it is
1797 returned by the macro, it will definitely be less than the
1798 actual mode mask. Since SIZE is within the Pmode address
1799 space, we limit MODE to Pmode. */
1800 && ((CONST_INT_P (size
)
1801 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1802 <= (GET_MODE_MASK (mode
) >> 1)))
1803 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1804 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1806 class expand_operand ops
[9];
1809 /* ??? When called via emit_block_move_for_call, it'd be
1810 nice if there were some way to inform the backend, so
1811 that it doesn't fail the expansion because it thinks
1812 emitting the libcall would be more efficient. */
1813 nops
= insn_data
[(int) code
].n_generator_args
;
1814 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1816 create_fixed_operand (&ops
[0], x
);
1817 create_fixed_operand (&ops
[1], y
);
1818 /* The check above guarantees that this size conversion is valid. */
1819 create_convert_operand_to (&ops
[2], size
, mode
, true);
1820 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1823 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1824 create_integer_operand (&ops
[5], expected_size
);
1828 create_integer_operand (&ops
[6], min_size
);
1829 /* If we cannot represent the maximal size,
1830 make parameter NULL. */
1831 if ((HOST_WIDE_INT
) max_size
!= -1)
1832 create_integer_operand (&ops
[7], max_size
);
1834 create_fixed_operand (&ops
[7], NULL
);
1838 /* If we cannot represent the maximal size,
1839 make parameter NULL. */
1840 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1841 create_integer_operand (&ops
[8], probable_max_size
);
1843 create_fixed_operand (&ops
[8], NULL
);
1845 if (maybe_expand_insn (code
, nops
, ops
))
1853 /* A subroutine of emit_block_move. Copy the data via an explicit
1854 loop. This is used only when libcalls are forbidden. */
1855 /* ??? It'd be nice to copy in hunks larger than QImode. */
1858 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1859                           unsigned int align ATTRIBUTE_UNUSED)
1861   rtx_code_label *cmp_label, *top_label;
1862   rtx iter, x_addr, y_addr, tmp;
1863   machine_mode x_addr_mode = get_address_mode (x);
1864   machine_mode y_addr_mode = get_address_mode (y);
1865   machine_mode iter_mode;
1867   iter_mode = GET_MODE (size);
1868   if (iter_mode == VOIDmode)
1869     iter_mode = word_mode;
1871   top_label = gen_label_rtx ();
1872   cmp_label = gen_label_rtx ();
1873   iter = gen_reg_rtx (iter_mode);
1875   emit_move_insn (iter, const0_rtx);
1877   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1878   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1879   do_pending_stack_adjust ();
1881   emit_jump (cmp_label);
1882   emit_label (top_label);
1884   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1885   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1887   if (x_addr_mode != y_addr_mode)
1888     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1889   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1891   x = change_address (x, QImode, x_addr);
1892   y = change_address (y, QImode, y_addr);
1894   emit_move_insn (x, y);
1896   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1897                              true, OPTAB_LIB_WIDEN);
1899   emit_move_insn (iter, tmp);
1901   emit_label (cmp_label);
1903   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1905                            profile_probability::guessed_always ()
1906                            .apply_scale (9, 10));
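/* The generated RTL is equivalent to (a sketch in C, not the exact insns):
     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size) goto top;  */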
1909 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1910 TAILCALL is true if this is a tail call. */
1913 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1914                            rtx size, bool tailcall)
1916   rtx dst_addr, src_addr;
1917   tree call_expr, dst_tree, src_tree, size_tree;
1918   machine_mode size_mode;
1920   /* Since dst and src are passed to a libcall, mark the corresponding
1921      tree EXPR as addressable. */
1922   tree dst_expr = MEM_EXPR (dst);
1923   tree src_expr = MEM_EXPR (src);
1925     mark_addressable (dst_expr);
1927     mark_addressable (src_expr);
1929   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1930   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1931   dst_tree = make_tree (ptr_type_node, dst_addr);
1933   src_addr = copy_addr_to_reg (XEXP (src, 0));
1934   src_addr = convert_memory_address (ptr_mode, src_addr);
1935   src_tree = make_tree (ptr_type_node, src_addr);
1937   size_mode = TYPE_MODE (sizetype);
1938   size = convert_to_mode (size_mode, size, 1);
1939   size = copy_to_mode_reg (size_mode, size);
1940   size_tree = make_tree (sizetype, size);
1942   /* It is incorrect to use the libcall calling conventions for calls to
1943      memcpy/memmove/memcmp because they can be provided by the user. */
1944   tree fn = builtin_decl_implicit (fncode);
1945   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1946   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1948   return expand_call (call_expr, NULL_RTX, false);
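/* emit_block_copy_via_libcall, called from emit_block_move_hints above, is
   a thin wrapper that passes BUILT_IN_MEMCPY to this routine.  */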
1951 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1952 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1953 otherwise return null. */
1956 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1957                           rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1958                           HOST_WIDE_INT align)
1960   machine_mode insn_mode = insn_data[icode].operand[0].mode;
1962   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1965   class expand_operand ops[5];
1966   create_output_operand (&ops[0], target, insn_mode);
1967   create_fixed_operand (&ops[1], arg1_rtx);
1968   create_fixed_operand (&ops[2], arg2_rtx);
1969   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1970                                TYPE_UNSIGNED (arg3_type));
1971   create_integer_operand (&ops[4], align);
1972   if (maybe_expand_insn (icode, 5, ops))
1973     return ops[0].value;
1977 /* Expand a block compare between X and Y with length LEN using the
1978 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
1979 of the expression that was used to calculate the length. ALIGN
1980 gives the known minimum common alignment. */
1983 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1986   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1987      implementing memcmp because it will stop if it encounters two
1989   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1991   if (icode == CODE_FOR_nothing)
1994   return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1997 /* Emit code to compare a block Y to a block X. This may be done with
1998 string-compare instructions, with multiple scalar instructions,
1999 or with a library call.
2001 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
2002 they are. LEN_TYPE is the type of the expression that was used to
2005 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
2006 value of a normal memcmp call, instead we can just compare for equality.
2007 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
2010 Optionally, the caller can pass a constfn and associated data in Y_CFN
2011 and Y_CFN_DATA, describing that the second operand being compared is a
2012 known constant and how to obtain its data.
2013 Return the result of the comparison, or NULL_RTX if we failed to
2014 perform the operation. */
2017 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
2018                       bool equality_only, by_pieces_constfn y_cfn,
2023   if (CONST_INT_P (len) && INTVAL (len) == 0)
2026   gcc_assert (MEM_P (x) && MEM_P (y));
2027   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
2028   gcc_assert (align >= BITS_PER_UNIT);
2030   x = adjust_address (x, BLKmode, 0);
2031   y = adjust_address (y, BLKmode, 0);
2034       && CONST_INT_P (len)
2035       && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
2036     result = compare_by_pieces (x, y, INTVAL (len), target, align,
2039     result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
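/* For illustration, a hypothetical group describing a value split across
   two registers:

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx applied to it produces the same shape with fresh pseudos
   in place of (reg:DI 3) and (reg:DI 4), keeping the modes and the byte
   offsets.  */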
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
2149 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
,
2154 machine_mode m
= GET_MODE (orig_src
);
2156 gcc_assert (GET_CODE (dst
) == PARALLEL
);
2159 && !SCALAR_INT_MODE_P (m
)
2160 && !MEM_P (orig_src
)
2161 && GET_CODE (orig_src
) != CONCAT
)
2163 scalar_int_mode imode
;
2164 if (int_mode_for_mode (GET_MODE (orig_src
)).exists (&imode
))
2166 src
= gen_reg_rtx (imode
);
2167 emit_move_insn (gen_lowpart (GET_MODE (orig_src
), src
), orig_src
);
2171 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
2172 emit_move_insn (src
, orig_src
);
2174 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
2178 /* Check for a NULL entry, used to indicate that the parameter goes
2179 both on the stack and in registers. */
2180 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2185 /* Process the pieces. */
2186 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2188 machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2189 poly_int64 bytepos
= rtx_to_poly_int64 (XEXP (XVECEXP (dst
, 0, i
), 1));
2190 poly_int64 bytelen
= GET_MODE_SIZE (mode
);
2191 poly_int64 shift
= 0;
2193 /* Handle trailing fragments that run over the size of the struct.
2194 It's the target's responsibility to make sure that the fragment
2195 cannot be strictly smaller in some cases and strictly larger
2197 gcc_checking_assert (ordered_p (bytepos
+ bytelen
, ssize
));
2198 if (known_size_p (ssize
) && maybe_gt (bytepos
+ bytelen
, ssize
))
2200 /* Arrange to shift the fragment to where it belongs.
2201 extract_bit_field loads to the lsb of the reg. */
2203 #ifdef BLOCK_REG_PADDING
2204 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
2205 == (BYTES_BIG_ENDIAN
? PAD_UPWARD
: PAD_DOWNWARD
)
2210 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2211 bytelen
= ssize
- bytepos
;
2212 gcc_assert (maybe_gt (bytelen
, 0));
2215 /* If we won't be loading directly from memory, protect the real source
2216 from strange tricks we might play; but make sure that the source can
2217 be loaded directly into the destination. */
2219 if (!MEM_P (orig_src
)
2220 && (!CONSTANT_P (orig_src
)
2221 || (GET_MODE (orig_src
) != mode
2222 && GET_MODE (orig_src
) != VOIDmode
)))
2224 if (GET_MODE (orig_src
) == VOIDmode
)
2225 src
= gen_reg_rtx (mode
);
2227 src
= gen_reg_rtx (GET_MODE (orig_src
));
2229 emit_move_insn (src
, orig_src
);
2232 /* Optimize the access just a bit. */
2234 && (! targetm
.slow_unaligned_access (mode
, MEM_ALIGN (src
))
2235 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
2236 && multiple_p (bytepos
* BITS_PER_UNIT
, GET_MODE_ALIGNMENT (mode
))
2237 && known_eq (bytelen
, GET_MODE_SIZE (mode
)))
2239 tmps
[i
] = gen_reg_rtx (mode
);
2240 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2242 else if (COMPLEX_MODE_P (mode
)
2243 && GET_MODE (src
) == mode
2244 && known_eq (bytelen
, GET_MODE_SIZE (mode
)))
2245 /* Let emit_move_complex do the bulk of the work. */
2247 else if (GET_CODE (src
) == CONCAT
)
2249 poly_int64 slen
= GET_MODE_SIZE (GET_MODE (src
));
2250 poly_int64 slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2254 if (can_div_trunc_p (bytepos
, slen0
, &elt
, &subpos
)
2255 && known_le (subpos
+ bytelen
, slen0
))
2257 /* The following assumes that the concatenated objects all
2258 have the same size. In this case, a simple calculation
2259 can be used to determine the object and the bit field
2261 tmps
[i
] = XEXP (src
, elt
);
2262 if (maybe_ne (subpos
, 0)
2263 || maybe_ne (subpos
+ bytelen
, slen0
)
2264 || (!CONSTANT_P (tmps
[i
])
2265 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
)))
2266 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2267 subpos
* BITS_PER_UNIT
,
2268 1, NULL_RTX
, mode
, mode
, false,
2275 gcc_assert (known_eq (bytepos
, 0));
2276 mem
= assign_stack_temp (GET_MODE (src
), slen
);
2277 emit_move_insn (mem
, src
);
2278 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
2279 0, 1, NULL_RTX
, mode
, mode
, false,
2283 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2284 SIMD register, which is currently broken. While we get GCC
2285 to emit proper RTL for these cases, let's dump to memory. */
2286 else if (VECTOR_MODE_P (GET_MODE (dst
))
2289 poly_uint64 slen
= GET_MODE_SIZE (GET_MODE (src
));
2292 mem
= assign_stack_temp (GET_MODE (src
), slen
);
2293 emit_move_insn (mem
, src
);
2294 tmps
[i
] = adjust_address (mem
, mode
, bytepos
);
2296 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
2297 && XVECLEN (dst
, 0) > 1)
2298 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE (dst
), bytepos
);
2299 else if (CONSTANT_P (src
))
2301 if (known_eq (bytelen
, ssize
))
2307 /* TODO: const_wide_int can have sizes other than this... */
2308 gcc_assert (known_eq (2 * bytelen
, ssize
));
2309 split_double (src
, &first
, &second
);
2316 else if (REG_P (src
) && GET_MODE (src
) == mode
)
2319 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2320 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2321 mode
, mode
, false, NULL
);
2323 if (maybe_ne (shift
, 0))
2324 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
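/* For illustration, a hypothetical use of the two routines above: given
   the PARALLEL sketched after gen_group_rtx and a BLKmode source MEM,

     emit_group_load (parallel, mem, type, int_size_in_bytes (type));

   extracts each piece at its recorded byte offset and moves it into the
   corresponding (probable) hard register.  */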
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */
2429 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
,
2433 int start
, finish
, i
;
2434 machine_mode m
= GET_MODE (orig_dst
);
2436 gcc_assert (GET_CODE (src
) == PARALLEL
);
2438 if (!SCALAR_INT_MODE_P (m
)
2439 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
2441 scalar_int_mode imode
;
2442 if (int_mode_for_mode (GET_MODE (orig_dst
)).exists (&imode
))
2444 dst
= gen_reg_rtx (imode
);
2445 emit_group_store (dst
, src
, type
, ssize
);
2446 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
2450 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
2451 emit_group_store (dst
, src
, type
, ssize
);
2453 emit_move_insn (orig_dst
, dst
);
2457 /* Check for a NULL entry, used to indicate that the parameter goes
2458 both on the stack and in registers. */
2459 if (XEXP (XVECEXP (src
, 0, 0), 0))
2463 finish
= XVECLEN (src
, 0);
2465 tmps
= XALLOCAVEC (rtx
, finish
);
2467 /* Copy the (probable) hard regs into pseudos. */
2468 for (i
= start
; i
< finish
; i
++)
2470 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2471 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
2473 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2474 emit_move_insn (tmps
[i
], reg
);
2480 /* If we won't be storing directly into memory, protect the real destination
2481 from strange tricks we might play. */
2483 if (GET_CODE (dst
) == PARALLEL
)
2487 /* We can get a PARALLEL dst if there is a conditional expression in
2488 a return statement. In that case, the dst and src are the same,
2489 so no action is necessary. */
2490 if (rtx_equal_p (dst
, src
))
2493 /* It is unclear if we can ever reach here, but we may as well handle
2494 it. Allocate a temporary, and split this into a store/load to/from
2496 temp
= assign_stack_temp (GET_MODE (dst
), ssize
);
2497 emit_group_store (temp
, src
, type
, ssize
);
2498 emit_group_load (dst
, temp
, type
, ssize
);
2501 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
2503 machine_mode outer
= GET_MODE (dst
);
2509 if (!REG_P (dst
) || REGNO (dst
) < FIRST_PSEUDO_REGISTER
)
2510 dst
= gen_reg_rtx (outer
);
2512 /* Make life a bit easier for combine. */
2513 /* If the first element of the vector is the low part
2514 of the destination mode, use a paradoxical subreg to
2515 initialize the destination. */
2518 inner
= GET_MODE (tmps
[start
]);
2519 bytepos
= subreg_lowpart_offset (inner
, outer
);
2520 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src
, 0, start
), 1)),
2523 temp
= simplify_gen_subreg (outer
, tmps
[start
],
2527 emit_move_insn (dst
, temp
);
2534 /* If the first element wasn't the low part, try the last. */
2536 && start
< finish
- 1)
2538 inner
= GET_MODE (tmps
[finish
- 1]);
2539 bytepos
= subreg_lowpart_offset (inner
, outer
);
2540 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src
, 0,
2544 temp
= simplify_gen_subreg (outer
, tmps
[finish
- 1],
2548 emit_move_insn (dst
, temp
);
2555 /* Otherwise, simply initialize the result to zero. */
2557 emit_move_insn (dst
, CONST0_RTX (outer
));
2560 /* Process the pieces. */
2561 for (i
= start
; i
< finish
; i
++)
2563 poly_int64 bytepos
= rtx_to_poly_int64 (XEXP (XVECEXP (src
, 0, i
), 1));
2564 machine_mode mode
= GET_MODE (tmps
[i
]);
2565 poly_int64 bytelen
= GET_MODE_SIZE (mode
);
2566 poly_uint64 adj_bytelen
;
2569 /* Handle trailing fragments that run over the size of the struct.
2570 It's the target's responsibility to make sure that the fragment
2571 cannot be strictly smaller in some cases and strictly larger
2573 gcc_checking_assert (ordered_p (bytepos
+ bytelen
, ssize
));
2574 if (known_size_p (ssize
) && maybe_gt (bytepos
+ bytelen
, ssize
))
2575 adj_bytelen
= ssize
- bytepos
;
2577 adj_bytelen
= bytelen
;
2579 if (GET_CODE (dst
) == CONCAT
)
2581 if (known_le (bytepos
+ adj_bytelen
,
2582 GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)))))
2583 dest
= XEXP (dst
, 0);
2584 else if (known_ge (bytepos
, GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)))))
2586 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2587 dest
= XEXP (dst
, 1);
2591 machine_mode dest_mode
= GET_MODE (dest
);
2592 machine_mode tmp_mode
= GET_MODE (tmps
[i
]);
2594 gcc_assert (known_eq (bytepos
, 0) && XVECLEN (src
, 0));
2596 if (GET_MODE_ALIGNMENT (dest_mode
)
2597 >= GET_MODE_ALIGNMENT (tmp_mode
))
2599 dest
= assign_stack_temp (dest_mode
,
2600 GET_MODE_SIZE (dest_mode
));
2601 emit_move_insn (adjust_address (dest
,
2609 dest
= assign_stack_temp (tmp_mode
,
2610 GET_MODE_SIZE (tmp_mode
));
2611 emit_move_insn (dest
, tmps
[i
]);
2612 dst
= adjust_address (dest
, dest_mode
, bytepos
);
2618 /* Handle trailing fragments that run over the size of the struct. */
2619 if (known_size_p (ssize
) && maybe_gt (bytepos
+ bytelen
, ssize
))
2621 /* store_bit_field always takes its value from the lsb.
2622 Move the fragment to the lsb if it's not already there. */
2624 #ifdef BLOCK_REG_PADDING
2625 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2626 == (BYTES_BIG_ENDIAN
? PAD_UPWARD
: PAD_DOWNWARD
)
2632 poly_int64 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2633 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
2637 /* Make sure not to write past the end of the struct. */
2638 store_bit_field (dest
,
2639 adj_bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2640 bytepos
* BITS_PER_UNIT
, ssize
* BITS_PER_UNIT
- 1,
2641 VOIDmode
, tmps
[i
], false);
2644 /* Optimize the access just a bit. */
2645 else if (MEM_P (dest
)
2646 && (!targetm
.slow_unaligned_access (mode
, MEM_ALIGN (dest
))
2647 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2648 && multiple_p (bytepos
* BITS_PER_UNIT
,
2649 GET_MODE_ALIGNMENT (mode
))
2650 && known_eq (bytelen
, GET_MODE_SIZE (mode
)))
2651 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2654 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2655 0, 0, mode
, tmps
[i
], false);
2658 /* Copy from the pseudo into the (probable) hard reg. */
2659 if (orig_dst
!= dst
)
2660 emit_move_insn (orig_dst
, dst
);
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */
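/* For illustration: a hypothetical caller deals with a function returning

     struct s { char c[6]; };

   on a target that passes the value back in a single DImode register.
   copy_blkmode_from_reg then moves the register contents, bit-field by
   bit-field, into the BLKmode result slot, honouring BLOCK_REG_PADDING
   and targetm.calls.return_in_msb so that the padding bits are skipped.  */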
2685 copy_blkmode_from_reg (rtx target
, rtx srcreg
, tree type
)
2687 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2688 rtx src
= NULL
, dst
= NULL
;
2689 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2690 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2691 /* No current ABI uses variable-sized modes to pass a BLKmnode type. */
2692 fixed_size_mode mode
= as_a
<fixed_size_mode
> (GET_MODE (srcreg
));
2693 fixed_size_mode tmode
= as_a
<fixed_size_mode
> (GET_MODE (target
));
2694 fixed_size_mode copy_mode
;
2696 /* BLKmode registers created in the back-end shouldn't have survived. */
2697 gcc_assert (mode
!= BLKmode
);
2699 /* If the structure doesn't take up a whole number of words, see whether
2700 SRCREG is padded on the left or on the right. If it's on the left,
2701 set PADDING_CORRECTION to the number of bits to skip.
2703 In most ABIs, the structure will be returned at the least end of
2704 the register, which translates to right padding on little-endian
2705 targets and left padding on big-endian targets. The opposite
2706 holds if the structure is returned at the most significant
2707 end of the register. */
2708 if (bytes
% UNITS_PER_WORD
!= 0
2709 && (targetm
.calls
.return_in_msb (type
)
2711 : BYTES_BIG_ENDIAN
))
2713 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2715 /* We can use a single move if we have an exact mode for the size. */
2716 else if (MEM_P (target
)
2717 && (!targetm
.slow_unaligned_access (mode
, MEM_ALIGN (target
))
2718 || MEM_ALIGN (target
) >= GET_MODE_ALIGNMENT (mode
))
2719 && bytes
== GET_MODE_SIZE (mode
))
2721 emit_move_insn (adjust_address (target
, mode
, 0), srcreg
);
2725 /* And if we additionally have the same mode for a register. */
2726 else if (REG_P (target
)
2727 && GET_MODE (target
) == mode
2728 && bytes
== GET_MODE_SIZE (mode
))
2730 emit_move_insn (target
, srcreg
);
2734 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2735 into a new pseudo which is a full word. */
2736 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2738 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2742 /* Copy the structure BITSIZE bits at a time. If the target lives in
2743 memory, take care of not reading/writing past its end by selecting
2744 a copy mode suited to BITSIZE. This should always be possible given
2747 If the target lives in register, make sure not to select a copy mode
2748 larger than the mode of the register.
2750 We could probably emit more efficient code for machines which do not use
2751 strict alignment, but it doesn't seem worth the effort at the current
2754 copy_mode
= word_mode
;
2757 opt_scalar_int_mode mem_mode
= int_mode_for_size (bitsize
, 1);
2758 if (mem_mode
.exists ())
2759 copy_mode
= mem_mode
.require ();
2761 else if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2764 for (bitpos
= 0, xbitpos
= padding_correction
;
2765 bitpos
< bytes
* BITS_PER_UNIT
;
2766 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2768 /* We need a new source operand each time xbitpos is on a
2769 word boundary and when xbitpos == padding_correction
2770 (the first time through). */
2771 if (xbitpos
% BITS_PER_WORD
== 0 || xbitpos
== padding_correction
)
2772 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, mode
);
2774 /* We need a new destination operand each time bitpos is on
2776 if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2778 else if (bitpos
% BITS_PER_WORD
== 0)
2779 dst
= operand_subword (target
, bitpos
/ BITS_PER_WORD
, 1, tmode
);
2781 /* Use xbitpos for the source extraction (right justified) and
2782 bitpos for the destination store (left justified). */
2783 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, 0, 0, copy_mode
,
2784 extract_bit_field (src
, bitsize
,
2785 xbitpos
% BITS_PER_WORD
, 1,
2786 NULL_RTX
, copy_mode
, copy_mode
,
/* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */
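/* For illustration: for the same hypothetical 6-byte structure mentioned
   above, copy_blkmode_to_reg packs the BLKmode value into word-sized
   pseudos bit-field by bit-field and then assembles them into a single
   register of the smallest integer mode large enough to hold the whole
   structure, which the caller can use as the function's return value.  */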
2798 copy_blkmode_to_reg (machine_mode mode_in
, tree src
)
2801 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0, bytes
;
2802 unsigned int bitsize
;
2803 rtx
*dst_words
, dst
, x
, src_word
= NULL_RTX
, dst_word
= NULL_RTX
;
2804 /* No current ABI uses variable-sized modes to pass a BLKmnode type. */
2805 fixed_size_mode mode
= as_a
<fixed_size_mode
> (mode_in
);
2806 fixed_size_mode dst_mode
;
2807 scalar_int_mode min_mode
;
2809 gcc_assert (TYPE_MODE (TREE_TYPE (src
)) == BLKmode
);
2811 x
= expand_normal (src
);
2813 bytes
= arg_int_size_in_bytes (TREE_TYPE (src
));
2817 /* If the structure doesn't take up a whole number of words, see
2818 whether the register value should be padded on the left or on
2819 the right. Set PADDING_CORRECTION to the number of padding
2820 bits needed on the left side.
2822 In most ABIs, the structure will be returned at the least end of
2823 the register, which translates to right padding on little-endian
2824 targets and left padding on big-endian targets. The opposite
2825 holds if the structure is returned at the most significant
2826 end of the register. */
2827 if (bytes
% UNITS_PER_WORD
!= 0
2828 && (targetm
.calls
.return_in_msb (TREE_TYPE (src
))
2830 : BYTES_BIG_ENDIAN
))
2831 padding_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2834 n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2835 dst_words
= XALLOCAVEC (rtx
, n_regs
);
2836 bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (src
)), BITS_PER_WORD
);
2837 min_mode
= smallest_int_mode_for_size (bitsize
);
2839 /* Copy the structure BITSIZE bits at a time. */
2840 for (bitpos
= 0, xbitpos
= padding_correction
;
2841 bitpos
< bytes
* BITS_PER_UNIT
;
2842 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2844 /* We need a new destination pseudo each time xbitpos is
2845 on a word boundary and when xbitpos == padding_correction
2846 (the first time through). */
2847 if (xbitpos
% BITS_PER_WORD
== 0
2848 || xbitpos
== padding_correction
)
2850 /* Generate an appropriate register. */
2851 dst_word
= gen_reg_rtx (word_mode
);
2852 dst_words
[xbitpos
/ BITS_PER_WORD
] = dst_word
;
2854 /* Clear the destination before we move anything into it. */
2855 emit_move_insn (dst_word
, CONST0_RTX (word_mode
));
2858 /* Find the largest integer mode that can be used to copy all or as
2859 many bits as possible of the structure if the target supports larger
2860 copies. There are too many corner cases here w.r.t to alignments on
2861 the read/writes. So if there is any padding just use single byte
2863 opt_scalar_int_mode mode_iter
;
2864 if (padding_correction
== 0 && !STRICT_ALIGNMENT
)
2866 FOR_EACH_MODE_FROM (mode_iter
, min_mode
)
2868 unsigned int msize
= GET_MODE_BITSIZE (mode_iter
.require ());
2869 if (msize
<= ((bytes
* BITS_PER_UNIT
) - bitpos
)
2870 && msize
<= BITS_PER_WORD
)
2877 /* We need a new source operand each time bitpos is on a word
2879 if (bitpos
% BITS_PER_WORD
== 0)
2880 src_word
= operand_subword_force (x
, bitpos
/ BITS_PER_WORD
, BLKmode
);
2882 /* Use bitpos for the source extraction (left justified) and
2883 xbitpos for the destination store (right justified). */
2884 store_bit_field (dst_word
, bitsize
, xbitpos
% BITS_PER_WORD
,
2886 extract_bit_field (src_word
, bitsize
,
2887 bitpos
% BITS_PER_WORD
, 1,
2888 NULL_RTX
, word_mode
, word_mode
,
2893 if (mode
== BLKmode
)
2895 /* Find the smallest integer mode large enough to hold the
2896 entire structure. */
2897 opt_scalar_int_mode mode_iter
;
2898 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_INT
)
2899 if (GET_MODE_SIZE (mode_iter
.require ()) >= bytes
)
2902 /* A suitable mode should have been found. */
2903 mode
= mode_iter
.require ();
2906 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2907 dst_mode
= word_mode
;
2910 dst
= gen_reg_rtx (dst_mode
);
2912 for (i
= 0; i
< n_regs
; i
++)
2913 emit_move_insn (operand_subword (dst
, i
, 0, dst_mode
), dst_words
[i
]);
2915 if (mode
!= dst_mode
)
2916 dst
= gen_lowpart (mode
, dst
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
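/* For illustration, a hypothetical call expander using the routine above:

     rtx call_fusage = NULL_RTX;
     use_group_regs (&call_fusage, parallel);

   CALL_FUSAGE is later attached to the emitted call through
   CALL_INSN_FUNCTION_USAGE, so data-flow passes know the call reads the
   hard registers making up the group.  */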
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
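/* For illustration, a hypothetical pattern-matching use of the routine
   above during expansion:

     gimple *def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree mul_op0 = gimple_assign_rhs1 (def);
         tree mul_op1 = gimple_assign_rhs2 (def);
         ...
       }

   so that, e.g., a multiplication feeding an addition can be expanded as
   one combined operation when the target provides it.  */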
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */
3027 clear_storage_hints (rtx object
, rtx size
, enum block_op_methods method
,
3028 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
3029 unsigned HOST_WIDE_INT min_size
,
3030 unsigned HOST_WIDE_INT max_size
,
3031 unsigned HOST_WIDE_INT probable_max_size
)
3033 machine_mode mode
= GET_MODE (object
);
3036 gcc_assert (method
== BLOCK_OP_NORMAL
|| method
== BLOCK_OP_TAILCALL
);
3038 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3039 just move a zero. Otherwise, do this a piece at a time. */
3040 poly_int64 size_val
;
3042 && poly_int_rtx_p (size
, &size_val
)
3043 && known_eq (size_val
, GET_MODE_SIZE (mode
)))
3045 rtx zero
= CONST0_RTX (mode
);
3048 emit_move_insn (object
, zero
);
3052 if (COMPLEX_MODE_P (mode
))
3054 zero
= CONST0_RTX (GET_MODE_INNER (mode
));
3057 write_complex_part (object
, zero
, 0);
3058 write_complex_part (object
, zero
, 1);
3064 if (size
== const0_rtx
)
3067 align
= MEM_ALIGN (object
);
3069 if (CONST_INT_P (size
)
3070 && targetm
.use_by_pieces_infrastructure_p (INTVAL (size
), align
,
3072 optimize_insn_for_speed_p ()))
3073 clear_by_pieces (object
, INTVAL (size
), align
);
3074 else if (set_storage_via_setmem (object
, size
, const0_rtx
, align
,
3075 expected_align
, expected_size
,
3076 min_size
, max_size
, probable_max_size
))
3078 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object
)))
3079 return set_storage_via_libcall (object
, size
, const0_rtx
,
3080 method
== BLOCK_OP_TAILCALL
);
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
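/* For illustration: zero-initializing an aggregate, e.g.

     struct s x = {};

   typically reaches this code as something like

     clear_storage (target_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   and clear_storage_hints then tries clear_by_pieces for small constant
   sizes, a target setmem pattern next, and a memset libcall last.  */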
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
3129 /* Expand a setmem pattern; return true if successful. */
3132 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
3133 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
3134 unsigned HOST_WIDE_INT min_size
,
3135 unsigned HOST_WIDE_INT max_size
,
3136 unsigned HOST_WIDE_INT probable_max_size
)
3138 /* Try the most limited insn first, because there's no point
3139 including more than one in the machine description unless
3140 the more limited one has some advantage. */
3142 if (expected_align
< align
)
3143 expected_align
= align
;
3144 if (expected_size
!= -1)
3146 if ((unsigned HOST_WIDE_INT
)expected_size
> max_size
)
3147 expected_size
= max_size
;
3148 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
3149 expected_size
= min_size
;
3152 opt_scalar_int_mode mode_iter
;
3153 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_INT
)
3155 scalar_int_mode mode
= mode_iter
.require ();
3156 enum insn_code code
= direct_optab_handler (setmem_optab
, mode
);
3158 if (code
!= CODE_FOR_nothing
3159 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3160 here because if SIZE is less than the mode mask, as it is
3161 returned by the macro, it will definitely be less than the
3162 actual mode mask. Since SIZE is within the Pmode address
3163 space, we limit MODE to Pmode. */
3164 && ((CONST_INT_P (size
)
3165 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3166 <= (GET_MODE_MASK (mode
) >> 1)))
3167 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
3168 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
3170 class expand_operand ops
[9];
3173 nops
= insn_data
[(int) code
].n_generator_args
;
3174 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
3176 create_fixed_operand (&ops
[0], object
);
3177 /* The check above guarantees that this size conversion is valid. */
3178 create_convert_operand_to (&ops
[1], size
, mode
, true);
3179 create_convert_operand_from (&ops
[2], val
, byte_mode
, true);
3180 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
3183 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
3184 create_integer_operand (&ops
[5], expected_size
);
3188 create_integer_operand (&ops
[6], min_size
);
3189 /* If we cannot represent the maximal size,
3190 make parameter NULL. */
3191 if ((HOST_WIDE_INT
) max_size
!= -1)
3192 create_integer_operand (&ops
[7], max_size
);
3194 create_fixed_operand (&ops
[7], NULL
);
3198 /* If we cannot represent the maximal size,
3199 make parameter NULL. */
3200 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
3201 create_integer_operand (&ops
[8], probable_max_size
);
3203 create_fixed_operand (&ops
[8], NULL
);
3205 if (maybe_expand_insn (code
, nops
, ops
))
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */
3218 write_complex_part (rtx cplx
, rtx val
, bool imag_p
)
3224 if (GET_CODE (cplx
) == CONCAT
)
3226 emit_move_insn (XEXP (cplx
, imag_p
), val
);
3230 cmode
= GET_MODE (cplx
);
3231 imode
= GET_MODE_INNER (cmode
);
3232 ibitsize
= GET_MODE_BITSIZE (imode
);
3234 /* For MEMs simplify_gen_subreg may generate an invalid new address
3235 because, e.g., the original address is considered mode-dependent
3236 by the target, which restricts simplify_subreg from invoking
3237 adjust_address_nv. Instead of preparing fallback support for an
3238 invalid address, we call adjust_address_nv directly. */
3241 emit_move_insn (adjust_address_nv (cplx
, imode
,
3242 imag_p
? GET_MODE_SIZE (imode
) : 0),
3247 /* If the sub-object is at least word sized, then we know that subregging
3248 will work. This special case is important, since store_bit_field
3249 wants to operate on integer modes, and there's rarely an OImode to
3250 correspond to TCmode. */
3251 if (ibitsize
>= BITS_PER_WORD
3252 /* For hard regs we have exact predicates. Assume we can split
3253 the original object if it spans an even number of hard regs.
3254 This special case is important for SCmode on 64-bit platforms
3255 where the natural size of floating-point regs is 32-bit. */
3257 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
3258 && REG_NREGS (cplx
) % 2 == 0))
3260 rtx part
= simplify_gen_subreg (imode
, cplx
, cmode
,
3261 imag_p
? GET_MODE_SIZE (imode
) : 0);
3264 emit_move_insn (part
, val
);
3268 /* simplify_gen_subreg may fail for sub-word MEMs. */
3269 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
3272 store_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0, 0, 0, imode
, val
,
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */
3280 read_complex_part (rtx cplx
, bool imag_p
)
3286 if (GET_CODE (cplx
) == CONCAT
)
3287 return XEXP (cplx
, imag_p
);
3289 cmode
= GET_MODE (cplx
);
3290 imode
= GET_MODE_INNER (cmode
);
3291 ibitsize
= GET_MODE_BITSIZE (imode
);
3293 /* Special case reads from complex constants that got spilled to memory. */
3294 if (MEM_P (cplx
) && GET_CODE (XEXP (cplx
, 0)) == SYMBOL_REF
)
3296 tree decl
= SYMBOL_REF_DECL (XEXP (cplx
, 0));
3297 if (decl
&& TREE_CODE (decl
) == COMPLEX_CST
)
3299 tree part
= imag_p
? TREE_IMAGPART (decl
) : TREE_REALPART (decl
);
3300 if (CONSTANT_CLASS_P (part
))
3301 return expand_expr (part
, NULL_RTX
, imode
, EXPAND_NORMAL
);
3305 /* For MEMs simplify_gen_subreg may generate an invalid new address
3306 because, e.g., the original address is considered mode-dependent
3307 by the target, which restricts simplify_subreg from invoking
3308 adjust_address_nv. Instead of preparing fallback support for an
3309 invalid address, we call adjust_address_nv directly. */
3311 return adjust_address_nv (cplx
, imode
,
3312 imag_p
? GET_MODE_SIZE (imode
) : 0);
3314 /* If the sub-object is at least word sized, then we know that subregging
3315 will work. This special case is important, since extract_bit_field
3316 wants to operate on integer modes, and there's rarely an OImode to
3317 correspond to TCmode. */
3318 if (ibitsize
>= BITS_PER_WORD
3319 /* For hard regs we have exact predicates. Assume we can split
3320 the original object if it spans an even number of hard regs.
3321 This special case is important for SCmode on 64-bit platforms
3322 where the natural size of floating-point regs is 32-bit. */
3324 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
3325 && REG_NREGS (cplx
) % 2 == 0))
3327 rtx ret
= simplify_gen_subreg (imode
, cplx
, cmode
,
3328 imag_p
? GET_MODE_SIZE (imode
) : 0);
3332 /* simplify_gen_subreg may fail for sub-word MEMs. */
3333 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
3336 return extract_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0,
3337 true, NULL_RTX
, imode
, imode
, false, NULL
);
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */
3346 emit_move_change_mode (machine_mode new_mode
,
3347 machine_mode old_mode
, rtx x
, bool force
)
3351 if (push_operand (x
, GET_MODE (x
)))
3353 ret
= gen_rtx_MEM (new_mode
, XEXP (x
, 0));
3354 MEM_COPY_ATTRIBUTES (ret
, x
);
3358 /* We don't have to worry about changing the address since the
3359 size in bytes is supposed to be the same. */
3360 if (reload_in_progress
)
3362 /* Copy the MEM to change the mode and move any
3363 substitutions from the old MEM to the new one. */
3364 ret
= adjust_address_nv (x
, new_mode
, 0);
3365 copy_replacements (x
, ret
);
3368 ret
= adjust_address (x
, new_mode
, 0);
3372 /* Note that we do want simplify_subreg's behavior of validating
3373 that the new mode is ok for a hard register. If we were to use
3374 simplify_gen_subreg, we would create the subreg, but would
3375 probably run into the target not being able to implement it. */
3376 /* Except, of course, when FORCE is true, when this is exactly what
3377 we want. Which is needed for CCmodes on some targets. */
3379 ret
= simplify_gen_subreg (new_mode
, x
, old_mode
, 0);
3381 ret
= simplify_subreg (new_mode
, x
, old_mode
, 0);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */
3419 emit_move_resolve_push (machine_mode mode
, rtx x
)
3421 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
3424 poly_int64 adjust
= GET_MODE_SIZE (mode
);
3425 #ifdef PUSH_ROUNDING
3426 adjust
= PUSH_ROUNDING (adjust
);
3428 if (code
== PRE_DEC
|| code
== POST_DEC
)
3430 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3432 rtx expr
= XEXP (XEXP (x
, 0), 1);
3434 gcc_assert (GET_CODE (expr
) == PLUS
|| GET_CODE (expr
) == MINUS
);
3435 poly_int64 val
= rtx_to_poly_int64 (XEXP (expr
, 1));
3436 if (GET_CODE (expr
) == MINUS
)
3438 gcc_assert (known_eq (adjust
, val
) || known_eq (adjust
, -val
));
3442 /* Do not use anti_adjust_stack, since we don't want to update
3443 stack_pointer_delta. */
3444 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3445 gen_int_mode (adjust
, Pmode
), stack_pointer_rtx
,
3446 0, OPTAB_LIB_WIDEN
);
3447 if (temp
!= stack_pointer_rtx
)
3448 emit_move_insn (stack_pointer_rtx
, temp
);
3455 temp
= stack_pointer_rtx
;
3460 temp
= plus_constant (Pmode
, stack_pointer_rtx
, -adjust
);
3466 return replace_equiv_address (x
, temp
);
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */
3474 emit_move_complex_push (machine_mode mode
, rtx x
, rtx y
)
3476 scalar_mode submode
= GET_MODE_INNER (mode
);
3479 #ifdef PUSH_ROUNDING
3480 poly_int64 submodesize
= GET_MODE_SIZE (submode
);
3482 /* In case we output to the stack, but the size is smaller than the
3483 machine can push exactly, we need to use move instructions. */
3484 if (maybe_ne (PUSH_ROUNDING (submodesize
), submodesize
))
3486 x
= emit_move_resolve_push (mode
, x
);
3487 return emit_move_insn (x
, y
);
3491 /* Note that the real part always precedes the imag part in memory
3492 regardless of machine's endianness. */
3493 switch (GET_CODE (XEXP (x
, 0)))
3507 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3508 read_complex_part (y
, imag_first
));
3509 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3510 read_complex_part (y
, !imag_first
));
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */
3536 emit_move_complex (machine_mode mode
, rtx x
, rtx y
)
3540 /* Need to take special care for pushes, to maintain proper ordering
3541 of the data, and possibly extra padding. */
3542 if (push_operand (x
, mode
))
3543 return emit_move_complex_push (mode
, x
, y
);
3545 /* See if we can coerce the target into moving both values at once, except
3546 for floating point where we favor moving as parts if this is easy. */
3547 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
3548 && optab_handler (mov_optab
, GET_MODE_INNER (mode
)) != CODE_FOR_nothing
3550 && HARD_REGISTER_P (x
)
3551 && REG_NREGS (x
) == 1)
3553 && HARD_REGISTER_P (y
)
3554 && REG_NREGS (y
) == 1))
3556 /* Not possible if the values are inherently not adjacent. */
3557 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
3559 /* Is possible if both are registers (or subregs of registers). */
3560 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
3562 /* If one of the operands is a memory, and alignment constraints
3563 are friendly enough, we may be able to do combined memory operations.
3564 We do not attempt this if Y is a constant because that combination is
3565 usually better with the by-parts thing below. */
3566 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
3567 && (!STRICT_ALIGNMENT
3568 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
3577 /* For memory to memory moves, optimal behavior can be had with the
3578 existing block move logic. */
3579 if (MEM_P (x
) && MEM_P (y
))
3581 emit_block_move (x
, y
, gen_int_mode (GET_MODE_SIZE (mode
), Pmode
),
3582 BLOCK_OP_NO_LIBCALL
);
3583 return get_last_insn ();
3586 ret
= emit_move_via_integer (mode
, x
, y
, true);
3591 return emit_move_complex_parts (x
, y
);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  if (GET_CODE (op) != SUBREG)
    return false;
  machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
  poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
  return (known_ge (offset, GET_MODE_SIZE (innermostmode))
          || known_le (offset, -UNITS_PER_WORD));
}
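/* For illustration: on a 64-bit target, (subreg:TI (reg:DI x) 0) is a
   paradoxical subreg.  Word 0 comes from the DImode register, while word 1
   lies wholly beyond the inner mode, so undefined_operand_subword_p (op, 1)
   is true and emit_move_multi_word below can skip emitting a move for that
   word.  */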
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */
3640 emit_move_multi_word (machine_mode mode
, rtx x
, rtx y
)
3642 rtx_insn
*last_insn
= 0;
3648 /* This function can only handle cases where the number of words is
3649 known at compile time. */
3650 mode_size
= GET_MODE_SIZE (mode
).to_constant ();
3651 gcc_assert (mode_size
>= UNITS_PER_WORD
);
3653 /* If X is a push on the stack, do the push now and replace
3654 X with a reference to the stack pointer. */
3655 if (push_operand (x
, mode
))
3656 x
= emit_move_resolve_push (mode
, x
);
3658 /* If we are in reload, see if either operand is a MEM whose address
3659 is scheduled for replacement. */
3660 if (reload_in_progress
&& MEM_P (x
)
3661 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3662 x
= replace_equiv_address_nv (x
, inner
);
3663 if (reload_in_progress
&& MEM_P (y
)
3664 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3665 y
= replace_equiv_address_nv (y
, inner
);
3669 need_clobber
= false;
3670 for (i
= 0; i
< CEIL (mode_size
, UNITS_PER_WORD
); i
++)
3672 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3675 /* Do not generate code for a move if it would come entirely
3676 from the undefined bits of a paradoxical subreg. */
3677 if (undefined_operand_subword_p (y
, i
))
3680 ypart
= operand_subword (y
, i
, 1, mode
);
3682 /* If we can't get a part of Y, put Y into memory if it is a
3683 constant. Otherwise, force it into a register. Then we must
3684 be able to get a part of Y. */
3685 if (ypart
== 0 && CONSTANT_P (y
))
3687 y
= use_anchored_address (force_const_mem (mode
, y
));
3688 ypart
= operand_subword (y
, i
, 1, mode
);
3690 else if (ypart
== 0)
3691 ypart
= operand_subword_force (y
, i
, mode
);
3693 gcc_assert (xpart
&& ypart
);
3695 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3697 last_insn
= emit_move_insn (xpart
, ypart
);
3703 /* Show the output dies here. This is necessary for SUBREGs
3704 of pseudos since we cannot track their lifetimes correctly;
3705 hard regs shouldn't appear here except as return values.
3706 We never want to emit such a clobber after reload. */
3708 && ! (reload_in_progress
|| reload_completed
)
3709 && need_clobber
!= 0)
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
3722 emit_move_insn_1 (rtx x
, rtx y
)
3724 machine_mode mode
= GET_MODE (x
);
3725 enum insn_code code
;
3727 gcc_assert ((unsigned int) mode
< (unsigned int) MAX_MACHINE_MODE
);
3729 code
= optab_handler (mov_optab
, mode
);
3730 if (code
!= CODE_FOR_nothing
)
3731 return emit_insn (GEN_FCN (code
) (x
, y
));
3733 /* Expand complex moves by moving real part and imag part. */
3734 if (COMPLEX_MODE_P (mode
))
3735 return emit_move_complex (mode
, x
, y
);
3737 if (GET_MODE_CLASS (mode
) == MODE_DECIMAL_FLOAT
3738 || ALL_FIXED_POINT_MODE_P (mode
))
3740 rtx_insn
*result
= emit_move_via_integer (mode
, x
, y
, true);
3742 /* If we can't find an integer mode, use multi words. */
3746 return emit_move_multi_word (mode
, x
, y
);
3749 if (GET_MODE_CLASS (mode
) == MODE_CC
)
3750 return emit_move_ccmode (mode
, x
, y
);
3752 /* Try using a move pattern for the corresponding integer mode. This is
3753 only safe when simplify_subreg can convert MODE constants into integer
3754 constants. At present, it can only do this reliably if the value
3755 fits within a HOST_WIDE_INT. */
3757 || known_le (GET_MODE_BITSIZE (mode
), HOST_BITS_PER_WIDE_INT
))
3759 rtx_insn
*ret
= emit_move_via_integer (mode
, x
, y
, lra_in_progress
);
3763 if (! lra_in_progress
|| recog (PATTERN (ret
), ret
, 0) >= 0)
3768 return emit_move_multi_word (mode
, x
, y
);
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
3779 emit_move_insn (rtx x
, rtx y
)
3781 machine_mode mode
= GET_MODE (x
);
3782 rtx y_cst
= NULL_RTX
;
3783 rtx_insn
*last_insn
;
3786 gcc_assert (mode
!= BLKmode
3787 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
3792 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3793 && (last_insn
= compress_float_constant (x
, y
)))
3798 if (!targetm
.legitimate_constant_p (mode
, y
))
3800 y
= force_const_mem (mode
, y
);
3802 /* If the target's cannot_force_const_mem prevented the spill,
3803 assume that the target's move expanders will also take care
3804 of the non-legitimate constant. */
3808 y
= use_anchored_address (y
);
3812 /* If X or Y are memory references, verify that their addresses are valid
3815 && (! memory_address_addr_space_p (GET_MODE (x
), XEXP (x
, 0),
3817 && ! push_operand (x
, GET_MODE (x
))))
3818 x
= validize_mem (x
);
3821 && ! memory_address_addr_space_p (GET_MODE (y
), XEXP (y
, 0),
3822 MEM_ADDR_SPACE (y
)))
3823 y
= validize_mem (y
);
3825 gcc_assert (mode
!= BLKmode
);
3827 last_insn
= emit_move_insn_1 (x
, y
);
3829 if (y_cst
&& REG_P (x
)
3830 && (set
= single_set (last_insn
)) != NULL_RTX
3831 && SET_DEST (set
) == x
3832 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
3833 set_unique_reg_note (last_insn
, REG_EQUAL
, copy_rtx (y_cst
));
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
3858 compress_float_constant (rtx x
, rtx y
)
3860 machine_mode dstmode
= GET_MODE (x
);
3861 machine_mode orig_srcmode
= GET_MODE (y
);
3862 machine_mode srcmode
;
3863 const REAL_VALUE_TYPE
*r
;
3864 int oldcost
, newcost
;
3865 bool speed
= optimize_insn_for_speed_p ();
3867 r
= CONST_DOUBLE_REAL_VALUE (y
);
3869 if (targetm
.legitimate_constant_p (dstmode
, y
))
3870 oldcost
= set_src_cost (y
, orig_srcmode
, speed
);
3872 oldcost
= set_src_cost (force_const_mem (dstmode
, y
), dstmode
, speed
);
3874 FOR_EACH_MODE_UNTIL (srcmode
, orig_srcmode
)
3878 rtx_insn
*last_insn
;
3880 /* Skip if the target can't extend this way. */
3881 ic
= can_extend_p (dstmode
, srcmode
, 0);
3882 if (ic
== CODE_FOR_nothing
)
3885 /* Skip if the narrowed value isn't exact. */
3886 if (! exact_real_truncate (srcmode
, r
))
3889 trunc_y
= const_double_from_real_value (*r
, srcmode
);
3891 if (targetm
.legitimate_constant_p (srcmode
, trunc_y
))
3893 /* Skip if the target needs extra instructions to perform
3895 if (!insn_operand_matches (ic
, 1, trunc_y
))
3897 /* This is valid, but may not be cheaper than the original. */
3898 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3900 if (oldcost
< newcost
)
3903 else if (float_extend_from_mem
[dstmode
][srcmode
])
3905 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3906 /* This is valid, but may not be cheaper than the original. */
3907 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3909 if (oldcost
< newcost
)
3911 trunc_y
= validize_mem (trunc_y
);
3916 /* For CSE's benefit, force the compressed constant pool entry
3917 into a new pseudo. This constant may be used in different modes,
3918 and if not, combine will put things back together for us. */
3919 trunc_y
= force_reg (srcmode
, trunc_y
);
3921 /* If x is a hard register, perform the extension into a pseudo,
3922 so that e.g. stack realignment code is aware of it. */
3924 if (REG_P (x
) && HARD_REGISTER_P (x
))
3925 target
= gen_reg_rtx (dstmode
);
3927 emit_unop_insn (ic
, target
, trunc_y
, UNKNOWN
);
3928 last_insn
= get_last_insn ();
3931 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3934 return emit_move_insn (x
, target
);
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
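/* For illustration, a hypothetical use of push_block: allocating a
   variable-sized argument block of SIZE bytes with four bytes of padding
   at low addresses amounts to

     rtx addr = push_block (size_rtx, 4, 1);

   after which ADDR addresses the beginning of the freshly allocated block
   in the outgoing-argument area.  */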
3952 push_block (rtx size
, poly_int64 extra
, int below
)
3956 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3957 if (CONSTANT_P (size
))
3958 anti_adjust_stack (plus_constant (Pmode
, size
, extra
));
3959 else if (REG_P (size
) && known_eq (extra
, 0))
3960 anti_adjust_stack (size
);
3963 temp
= copy_to_mode_reg (Pmode
, size
);
3964 if (maybe_ne (extra
, 0))
3965 temp
= expand_binop (Pmode
, add_optab
, temp
,
3966 gen_int_mode (extra
, Pmode
),
3967 temp
, 0, OPTAB_LIB_WIDEN
);
3968 anti_adjust_stack (temp
);
3971 if (STACK_GROWS_DOWNWARD
)
3973 temp
= virtual_outgoing_args_rtx
;
3974 if (maybe_ne (extra
, 0) && below
)
3975 temp
= plus_constant (Pmode
, temp
, extra
);
3980 if (poly_int_rtx_p (size
, &csize
))
3981 temp
= plus_constant (Pmode
, virtual_outgoing_args_rtx
,
3982 -csize
- (below
? 0 : extra
));
3983 else if (maybe_ne (extra
, 0) && !below
)
3984 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3985 negate_rtx (Pmode
, plus_constant (Pmode
, size
,
3988 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3989 negate_rtx (Pmode
, size
));
3992 return memory_address (NARROWEST_INT_MODE
, temp
);
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
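/* For illustration: a push emitted as

     (set (mem:DI (pre_dec:P (reg sp))) (reg:DI 0))

   is recognised below through its auto-inc address; find_args_size_adjust
   returns -GET_MODE_SIZE (DImode), i.e. -8, and fixup_args_size_notes
   negates that on STACK_GROWS_DOWNWARD targets, so the recorded outgoing
   argument size grows by 8 bytes.  */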
poly_int64
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;
      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      poly_int64 offset;
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
	return offset;
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
	{
	  mem = dest;
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
	mem = SET_SRC (set);
      else
	return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
	{
	case PRE_INC:
	case POST_INC:
	  return GET_MODE_SIZE (GET_MODE (mem));
	case PRE_DEC:
	case POST_DEC:
	  return -GET_MODE_SIZE (GET_MODE (mem));
	case PRE_MODIFY:
	case POST_MODIFY:
	  addr = XEXP (addr, 1);
	  gcc_assert (GET_CODE (addr) == PLUS);
	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
	  return rtx_to_poly_int64 (XEXP (addr, 1));
	default:
	  gcc_unreachable ();
	}
    }
}
poly_int64
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
		       poly_int64 end_args_size)
{
  poly_int64 args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
	 a call argument containing a TLS address that itself requires
	 a call to __tls_get_addr.  The handling of stack_pointer_delta
	 in emit_single_push_insn is supposed to ensure that any such
	 notes are already correct.  */
      rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
      gcc_assert (!note || known_eq (args_size, get_args_size (note)));

      poly_int64 this_delta = find_args_size_adjust (insn);
      if (known_eq (this_delta, 0))
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (known_eq (this_delta, HOST_WIDE_INT_MIN))
	saw_unknown = true;

      if (!note)
	add_args_size_note (insn, args_size);
      if (STACK_GROWS_DOWNWARD)
	this_delta = -poly_uint64 (this_delta);

      if (saw_unknown)
	args_size = HOST_WIDE_INT_MIN;
      else
	args_size -= this_delta;
    }

  return args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (known_eq (GET_MODE_SIZE (mode), rounded_size))
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
    {
      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= rounded_size;

      dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  poly_int64 delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  /* Adjust stack_pointer_delta to describe the situation after the push
     we just performed.  Note that we must do this after the push rather
     than before the push in case calculating X needs pushes and pops of
     its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
     for such pushes and pops must not include the effect of the future
     command.  */
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_args_size_note (last, stack_pointer_delta);
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
	      || known_eq (delta, old_delta));
}
#endif
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */
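/* An illustrative sketch, not part of the compiler: the overlap test below
   on plain integer addresses.  Reading SIZE bytes starting at X overlaps a
   region starting at Y exactly when 1 <= (X + SIZE) - Y <= SIZE, and that
   difference is the number of overlapping bytes.  The "sketch_" helper is
   hypothetical.  */
#if 0
static int
sketch_memory_load_overlap (long x, long y, long size)
{
  long val = (x + size) - y;
  if (val >= 1 && val <= size)
    return (int) val;	/* number of overlapping bytes */
  return -1;		/* no overlap */
}
#endif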
static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   to not overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
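/* An illustrative sketch, not part of the compiler: the PARTIAL/REG split
   described above, on plain integers.  PARTIAL bytes of the argument are
   destined for registers starting at REG; only the remainder occupies stack
   space.  The struct and the "sketch_" helper are hypothetical.  */
#if 0
struct sketch_arg_split { long reg_bytes; long stack_bytes; };

static struct sketch_arg_split
sketch_split_arg (long arg_size, long partial)
{
  struct sketch_arg_split s;
  s.reg_bytes = partial;		/* copied into registers */
  s.stack_bytes = arg_size - partial;	/* pushed onto the stack */
  return s;
}
#endif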
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, poly_int64 extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
4372 pad_direction stack_direction
4373 = STACK_GROWS_DOWNWARD
? PAD_DOWNWARD
: PAD_UPWARD
;
4375 /* Decide where to pad the argument: PAD_DOWNWARD for below,
4376 PAD_UPWARD for above, or PAD_NONE for don't pad it.
4377 Default is below for small data on big-endian machines; else above. */
4378 pad_direction where_pad
= targetm
.calls
.function_arg_padding (mode
, type
);
4380 /* Invert direction if stack is post-decrement.
4382 if (STACK_PUSH_CODE
== POST_DEC
)
4383 if (where_pad
!= PAD_NONE
)
4384 where_pad
= (where_pad
== PAD_DOWNWARD
? PAD_UPWARD
: PAD_DOWNWARD
);
4388 int nregs
= partial
/ UNITS_PER_WORD
;
4389 rtx
*tmp_regs
= NULL
;
4390 int overlapping
= 0;
4393 || (STRICT_ALIGNMENT
&& align
< GET_MODE_ALIGNMENT (mode
)))
4395 /* Copy a block into the stack, entirely or partially. */
4402 offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4403 used
= partial
- offset
;
4405 if (mode
!= BLKmode
)
4407 /* A value is to be stored in an insufficiently aligned
4408 stack slot; copy via a suitably aligned slot if
4410 size
= gen_int_mode (GET_MODE_SIZE (mode
), Pmode
);
4411 if (!MEM_P (xinner
))
4413 temp
= assign_temp (type
, 1, 1);
4414 emit_move_insn (temp
, xinner
);
4421 /* USED is now the # of bytes we need not copy to the stack
4422 because registers will take care of them. */
4425 xinner
= adjust_address (xinner
, BLKmode
, used
);
4427 /* If the partial register-part of the arg counts in its stack size,
4428 skip the part of stack space corresponding to the registers.
4429 Otherwise, start copying to the beginning of the stack space,
4430 by setting SKIP to 0. */
4431 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
4433 #ifdef PUSH_ROUNDING
4434 /* Do it with several push insns if that doesn't take lots of insns
4435 and if there is no difficulty with push insns that skip bytes
4436 on the stack for alignment purposes. */
4439 && CONST_INT_P (size
)
4441 && MEM_ALIGN (xinner
) >= align
4442 && can_move_by_pieces ((unsigned) INTVAL (size
) - used
, align
)
4443 /* Here we avoid the case of a structure whose weak alignment
4444 forces many pushes of a small amount of data,
4445 and such small pushes do rounding that causes trouble. */
4446 && ((!targetm
.slow_unaligned_access (word_mode
, align
))
4447 || align
>= BIGGEST_ALIGNMENT
4448 || known_eq (PUSH_ROUNDING (align
/ BITS_PER_UNIT
),
4449 align
/ BITS_PER_UNIT
))
4450 && known_eq (PUSH_ROUNDING (INTVAL (size
)), INTVAL (size
)))
4452 /* Push padding now if padding above and stack grows down,
4453 or if padding below and stack grows up.
4454 But if space already allocated, this has already been done. */
4455 if (maybe_ne (extra
, 0)
4457 && where_pad
!= PAD_NONE
4458 && where_pad
!= stack_direction
)
4459 anti_adjust_stack (gen_int_mode (extra
, Pmode
));
4461 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
,
4465 #endif /* PUSH_ROUNDING */
4469 /* Otherwise make space on the stack and copy the data
4470 to the address of that space. */
4472 /* Deduct words put into registers from the size we must copy. */
4475 if (CONST_INT_P (size
))
4476 size
= GEN_INT (INTVAL (size
) - used
);
4478 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
4479 gen_int_mode (used
, GET_MODE (size
)),
4480 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4483 /* Get the address of the stack space.
4484 In this case, we do not deal with EXTRA separately.
4485 A single stack adjust will do. */
4489 temp
= push_block (size
, extra
, where_pad
== PAD_DOWNWARD
);
4492 else if (poly_int_rtx_p (args_so_far
, &offset
))
4493 temp
= memory_address (BLKmode
,
4494 plus_constant (Pmode
, args_addr
,
4497 temp
= memory_address (BLKmode
,
4498 plus_constant (Pmode
,
4499 gen_rtx_PLUS (Pmode
,
4504 if (!ACCUMULATE_OUTGOING_ARGS
)
4506 /* If the source is referenced relative to the stack pointer,
4507 copy it to another register to stabilize it. We do not need
4508 to do this if we know that we won't be changing sp. */
4510 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
4511 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
4512 temp
= copy_to_reg (temp
);
4515 target
= gen_rtx_MEM (BLKmode
, temp
);
4517 /* We do *not* set_mem_attributes here, because incoming arguments
4518 may overlap with sibling call outgoing arguments and we cannot
4519 allow reordering of reads from function arguments with stores
4520 to outgoing arguments of sibling calls. We do, however, want
4521 to record the alignment of the stack slot. */
4522 /* ALIGN may well be better aligned than TYPE, e.g. due to
4523 PARM_BOUNDARY. Assume the caller isn't lying. */
4524 set_mem_align (target
, align
);
4526 /* If part should go in registers and pushing to that part would
4527 overwrite some of the values that need to go into regs, load the
4528 overlapping values into temporary pseudos to be moved into the hard
4529 regs at the end after the stack pushing has completed.
4530 We cannot load them directly into the hard regs here because
4531 they can be clobbered by the block move expansions.
4534 if (partial
> 0 && reg
!= 0 && mode
== BLKmode
4535 && GET_CODE (reg
) != PARALLEL
)
4537 overlapping
= memory_load_overlap (XEXP (x
, 0), temp
, partial
);
4538 if (overlapping
> 0)
4540 gcc_assert (overlapping
% UNITS_PER_WORD
== 0);
4541 overlapping
/= UNITS_PER_WORD
;
4543 tmp_regs
= XALLOCAVEC (rtx
, overlapping
);
4545 for (int i
= 0; i
< overlapping
; i
++)
4546 tmp_regs
[i
] = gen_reg_rtx (word_mode
);
4548 for (int i
= 0; i
< overlapping
; i
++)
4549 emit_move_insn (tmp_regs
[i
],
4550 operand_subword_force (target
, i
, mode
));
4552 else if (overlapping
== -1)
4554 /* Could not determine whether there is overlap.
4555 Fail the sibcall. */
4563 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
4566 else if (partial
> 0)
      /* Scalar partly in registers.  This case is only supported
	 for fixed-width modes.  */
4570 int size
= GET_MODE_SIZE (mode
).to_constant ();
4571 size
/= UNITS_PER_WORD
;
4574 /* # bytes of start of argument
4575 that we must make space for but need not store. */
4576 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4577 int args_offset
= INTVAL (args_so_far
);
4580 /* Push padding now if padding above and stack grows down,
4581 or if padding below and stack grows up.
4582 But if space already allocated, this has already been done. */
4583 if (maybe_ne (extra
, 0)
4585 && where_pad
!= PAD_NONE
4586 && where_pad
!= stack_direction
)
4587 anti_adjust_stack (gen_int_mode (extra
, Pmode
));
4589 /* If we make space by pushing it, we might as well push
4590 the real data. Otherwise, we can leave OFFSET nonzero
4591 and leave the space uninitialized. */
4595 /* Now NOT_STACK gets the number of words that we don't need to
4596 allocate on the stack. Convert OFFSET to words too. */
4597 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
4598 offset
/= UNITS_PER_WORD
;
4600 /* If the partial register-part of the arg counts in its stack size,
4601 skip the part of stack space corresponding to the registers.
4602 Otherwise, start copying to the beginning of the stack space,
4603 by setting SKIP to 0. */
4604 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
4606 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
4607 x
= validize_mem (force_const_mem (mode
, x
));
4609 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4610 SUBREGs of such registers are not allowed. */
4611 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
4612 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4613 x
= copy_to_reg (x
);
4615 /* Loop over all the words allocated on the stack for this arg. */
4616 /* We can do it by words, because any scalar bigger than a word
4617 has a size a multiple of a word. */
4618 for (i
= size
- 1; i
>= not_stack
; i
--)
4619 if (i
>= not_stack
+ offset
)
4620 if (!emit_push_insn (operand_subword_force (x
, i
, mode
),
4621 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4623 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4625 reg_parm_stack_space
, alignment_pad
, sibcall_p
))
4633 /* Push padding now if padding above and stack grows down,
4634 or if padding below and stack grows up.
4635 But if space already allocated, this has already been done. */
4636 if (maybe_ne (extra
, 0)
4638 && where_pad
!= PAD_NONE
4639 && where_pad
!= stack_direction
)
4640 anti_adjust_stack (gen_int_mode (extra
, Pmode
));
4642 #ifdef PUSH_ROUNDING
4643 if (args_addr
== 0 && PUSH_ARGS
)
4644 emit_single_push_insn (mode
, x
, type
);
4648 addr
= simplify_gen_binary (PLUS
, Pmode
, args_addr
, args_so_far
);
4649 dest
= gen_rtx_MEM (mode
, memory_address (mode
, addr
));
4651 /* We do *not* set_mem_attributes here, because incoming arguments
4652 may overlap with sibling call outgoing arguments and we cannot
4653 allow reordering of reads from function arguments with stores
4654 to outgoing arguments of sibling calls. We do, however, want
4655 to record the alignment of the stack slot. */
4656 /* ALIGN may well be better aligned than TYPE, e.g. due to
4657 PARM_BOUNDARY. Assume the caller isn't lying. */
4658 set_mem_align (dest
, align
);
4660 emit_move_insn (dest
, x
);
4664 /* Move the partial arguments into the registers and any overlapping
4665 values that we moved into the pseudos in tmp_regs. */
4666 if (partial
> 0 && reg
!= 0)
4668 /* Handle calls that pass values in multiple non-contiguous locations.
4669 The Irix 6 ABI has examples of this. */
4670 if (GET_CODE (reg
) == PARALLEL
)
4671 emit_group_load (reg
, x
, type
, -1);
4674 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4675 move_block_to_reg (REGNO (reg
), x
, nregs
- overlapping
, mode
);
4677 for (int i
= 0; i
< overlapping
; i
++)
4678 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
)
4679 + nregs
- overlapping
+ i
),
4685 if (maybe_ne (extra
, 0) && args_addr
== 0 && where_pad
== stack_direction
)
4686 anti_adjust_stack (gen_int_mode (extra
, Pmode
));
4688 if (alignment_pad
&& args_addr
== 0)
4689 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (poly_uint64 pbitsize,
				 poly_uint64 pbitpos,
				 poly_uint64 pbitregion_start,
				 poly_uint64 pbitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
4721 /* str_mode is not guaranteed to be a scalar type. */
4722 machine_mode str_mode
= GET_MODE (str_rtx
);
4723 unsigned int str_bitsize
;
4728 enum tree_code code
;
4730 unsigned HOST_WIDE_INT bitsize
, bitpos
, bitregion_start
, bitregion_end
;
4731 if (mode1
!= VOIDmode
4732 || !pbitsize
.is_constant (&bitsize
)
4733 || !pbitpos
.is_constant (&bitpos
)
4734 || !pbitregion_start
.is_constant (&bitregion_start
)
4735 || !pbitregion_end
.is_constant (&bitregion_end
)
4736 || bitsize
>= BITS_PER_WORD
4737 || !GET_MODE_BITSIZE (str_mode
).is_constant (&str_bitsize
)
4738 || str_bitsize
> BITS_PER_WORD
4739 || TREE_SIDE_EFFECTS (to
)
4740 || TREE_THIS_VOLATILE (to
))
4744 if (TREE_CODE (src
) != SSA_NAME
)
4746 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4749 srcstmt
= get_gimple_for_ssa_name (src
);
4751 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt
)) != tcc_binary
)
4754 code
= gimple_assign_rhs_code (srcstmt
);
4756 op0
= gimple_assign_rhs1 (srcstmt
);
4758 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4759 to find its initialization. Hopefully the initialization will
4760 be from a bitfield load. */
4761 if (TREE_CODE (op0
) == SSA_NAME
)
4763 gimple
*op0stmt
= get_gimple_for_ssa_name (op0
);
4765 /* We want to eventually have OP0 be the same as TO, which
4766 should be a bitfield. */
4768 || !is_gimple_assign (op0stmt
)
4769 || gimple_assign_rhs_code (op0stmt
) != TREE_CODE (to
))
4771 op0
= gimple_assign_rhs1 (op0stmt
);
4774 op1
= gimple_assign_rhs2 (srcstmt
);
4776 if (!operand_equal_p (to
, op0
, 0))
4779 if (MEM_P (str_rtx
))
4781 unsigned HOST_WIDE_INT offset1
;
4783 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4784 str_bitsize
= BITS_PER_WORD
;
4786 scalar_int_mode best_mode
;
4787 if (!get_best_mode (bitsize
, bitpos
, bitregion_start
, bitregion_end
,
4788 MEM_ALIGN (str_rtx
), str_bitsize
, false, &best_mode
))
4790 str_mode
= best_mode
;
4791 str_bitsize
= GET_MODE_BITSIZE (best_mode
);
4794 bitpos
%= str_bitsize
;
4795 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4796 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4798 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4801 /* If the bit field covers the whole REG/MEM, store_field
4802 will likely generate better code. */
4803 if (bitsize
>= str_bitsize
)
4806 /* We can't handle fields split across multiple entities. */
4807 if (bitpos
+ bitsize
> str_bitsize
)
4810 if (reverse
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4811 bitpos
= str_bitsize
- bitpos
- bitsize
;
4817 /* For now, just optimize the case of the topmost bitfield
4818 where we don't need to do any masking and also
4819 1 bit bitfields where xor can be used.
4820 We might win by one instruction for the other bitfields
4821 too if insv/extv instructions aren't used, so that
4822 can be added later. */
4823 if ((reverse
|| bitpos
+ bitsize
!= str_bitsize
)
4824 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4827 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4828 value
= convert_modes (str_mode
,
4829 TYPE_MODE (TREE_TYPE (op1
)), value
,
4830 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4832 /* We may be accessing data outside the field, which means
4833 we can alias adjacent data. */
4834 if (MEM_P (str_rtx
))
4836 str_rtx
= shallow_copy_rtx (str_rtx
);
4837 set_mem_alias_set (str_rtx
, 0);
4838 set_mem_expr (str_rtx
, 0);
4841 if (bitsize
== 1 && (reverse
|| bitpos
+ bitsize
!= str_bitsize
))
4843 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4847 binop
= code
== PLUS_EXPR
? add_optab
: sub_optab
;
4849 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4851 value
= flip_storage_order (str_mode
, value
);
4852 result
= expand_binop (str_mode
, binop
, str_rtx
,
4853 value
, str_rtx
, 1, OPTAB_WIDEN
);
4854 if (result
!= str_rtx
)
4855 emit_move_insn (str_rtx
, result
);
4860 if (TREE_CODE (op1
) != INTEGER_CST
)
4862 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4863 value
= convert_modes (str_mode
,
4864 TYPE_MODE (TREE_TYPE (op1
)), value
,
4865 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4867 /* We may be accessing data outside the field, which means
4868 we can alias adjacent data. */
4869 if (MEM_P (str_rtx
))
4871 str_rtx
= shallow_copy_rtx (str_rtx
);
4872 set_mem_alias_set (str_rtx
, 0);
4873 set_mem_expr (str_rtx
, 0);
4876 binop
= code
== BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4877 if (bitpos
+ bitsize
!= str_bitsize
)
4879 rtx mask
= gen_int_mode ((HOST_WIDE_INT_1U
<< bitsize
) - 1,
4881 value
= expand_and (str_mode
, value
, mask
, NULL_RTX
);
4883 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4885 value
= flip_storage_order (str_mode
, value
);
4886 result
= expand_binop (str_mode
, binop
, str_rtx
,
4887 value
, str_rtx
, 1, OPTAB_WIDEN
);
4888 if (result
!= str_rtx
)
4889 emit_move_insn (str_rtx
, result
);
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
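/* An illustrative sketch, not part of the compiler: in the struct below the
   three leading bit-fields share one memory location, so a store to `b'
   may only touch the bit range of their common representative field, while
   `c' belongs to a separate location because a non-bit-field member
   intervenes.  The exact layout is target-dependent; this struct is only an
   example.  */
#if 0
struct sketch_bit_range
{
  unsigned a : 7;	/* same memory location as b and d */
  unsigned b : 9;
  unsigned d : 8;
  int whole;		/* ends the first bit-field run */
  unsigned c : 4;	/* starts a new memory location */
};
#endif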
void
get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
	       poly_int64_pod *bitpos, tree *offset)
{
  poly_int64 bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      poly_int64 rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if (!multiple_p (rbitpos, BITS_PER_UNIT))
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  poly_uint64 field_offset, repr_offset;
  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (maybe_gt (bitoffset, *bitpos))
    {
      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);

      *bitpos += adjust_bits;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust_bytes);
      else
	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
}
/* Returns true if BASE is a DECL that does not reside in memory and
   has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return false.  */

static bool
non_mem_decl_p (tree base)
{
  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return false;

  return (!MEM_P (DECL_RTL (base)));
}
/* Returns true if REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base;

  if (TREE_CODE (ref) == MEM_REF
      || TREE_CODE (ref) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (ref, 0);

      if (TREE_CODE (addr) != ADDR_EXPR)
	return false;

      base = TREE_OPERAND (addr, 0);
    }
  else
    base = ref;

  return non_mem_decl_p (base);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
5034 enum insn_code icode
;
5036 /* Don't crash if the lhs of the assignment was erroneous. */
5037 if (TREE_CODE (to
) == ERROR_MARK
)
5039 expand_normal (from
);
5043 /* Optimize away no-op moves without side-effects. */
5044 if (operand_equal_p (to
, from
, 0))
5047 /* Handle misaligned stores. */
5048 mode
= TYPE_MODE (TREE_TYPE (to
));
5049 if ((TREE_CODE (to
) == MEM_REF
5050 || TREE_CODE (to
) == TARGET_MEM_REF
5053 && !mem_ref_refers_to_non_mem_p (to
)
5054 && ((align
= get_object_alignment (to
))
5055 < GET_MODE_ALIGNMENT (mode
))
5056 && (((icode
= optab_handler (movmisalign_optab
, mode
))
5057 != CODE_FOR_nothing
)
5058 || targetm
.slow_unaligned_access (mode
, align
)))
5062 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5063 reg
= force_not_mem (reg
);
5064 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5065 if (TREE_CODE (to
) == MEM_REF
&& REF_REVERSE_STORAGE_ORDER (to
))
5066 reg
= flip_storage_order (mode
, reg
);
5068 if (icode
!= CODE_FOR_nothing
)
5070 class expand_operand ops
[2];
5072 create_fixed_operand (&ops
[0], mem
);
5073 create_input_operand (&ops
[1], reg
, mode
);
5074 /* The movmisalign<mode> pattern cannot fail, else the assignment
5075 would silently be omitted. */
5076 expand_insn (icode
, 2, ops
);
5079 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
,
5084 /* Assignment of a structure component needs special treatment
5085 if the structure component's rtx is not simply a MEM.
5086 Assignment of an array element at a constant index, and assignment of
5087 an array element in an unaligned packed structure field, has the same
5088 problem. Same for (partially) storing into a non-memory object. */
5089 if (handled_component_p (to
)
5090 || (TREE_CODE (to
) == MEM_REF
5091 && (REF_REVERSE_STORAGE_ORDER (to
)
5092 || mem_ref_refers_to_non_mem_p (to
)))
5093 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
5096 poly_int64 bitsize
, bitpos
;
5097 poly_uint64 bitregion_start
= 0;
5098 poly_uint64 bitregion_end
= 0;
5100 int unsignedp
, reversep
, volatilep
= 0;
5104 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
5105 &unsignedp
, &reversep
, &volatilep
);
5107 /* Make sure bitpos is not negative, it can wreak havoc later. */
5108 if (maybe_lt (bitpos
, 0))
5110 gcc_assert (offset
== NULL_TREE
);
5111 offset
= size_int (bits_to_bytes_round_down (bitpos
));
5112 bitpos
= num_trailing_bits (bitpos
);
5115 if (TREE_CODE (to
) == COMPONENT_REF
5116 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
5117 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
5118 /* The C++ memory model naturally applies to byte-aligned fields.
5119 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5120 BITSIZE are not byte-aligned, there is no need to limit the range
5121 we can access. This can occur with packed structures in Ada. */
5122 else if (maybe_gt (bitsize
, 0)
5123 && multiple_p (bitsize
, BITS_PER_UNIT
)
5124 && multiple_p (bitpos
, BITS_PER_UNIT
))
5126 bitregion_start
= bitpos
;
5127 bitregion_end
= bitpos
+ bitsize
- 1;
5130 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5132 /* If the field has a mode, we want to access it in the
5133 field's mode, not the computed mode.
5134 If a MEM has VOIDmode (external with incomplete type),
5135 use BLKmode for it instead. */
5138 if (mode1
!= VOIDmode
)
5139 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
5140 else if (GET_MODE (to_rtx
) == VOIDmode
)
5141 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
5146 machine_mode address_mode
;
5149 if (!MEM_P (to_rtx
))
5151 /* We can get constant negative offsets into arrays with broken
5152 user code. Translate this to a trap instead of ICEing. */
5153 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
5154 expand_builtin_trap ();
5155 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
5158 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5159 address_mode
= get_address_mode (to_rtx
);
5160 if (GET_MODE (offset_rtx
) != address_mode
)
5162 /* We cannot be sure that the RTL in offset_rtx is valid outside
5163 of a memory address context, so force it into a register
5164 before attempting to convert it to the desired mode. */
5165 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
5166 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5169 /* If we have an expression in OFFSET_RTX and a non-zero
5170 byte offset in BITPOS, adding the byte offset before the
5171 OFFSET_RTX results in better intermediate code, which makes
5172 later rtl optimization passes perform better.
5174 We prefer intermediate code like this:
5176 r124:DI=r123:DI+0x18
5181 r124:DI=r123:DI+0x10
5182 [r124:DI+0x8]=r121:DI
5184 This is only done for aligned data values, as these can
5185 be expected to result in single move instructions. */
5187 if (mode1
!= VOIDmode
5188 && maybe_ne (bitpos
, 0)
5189 && maybe_gt (bitsize
, 0)
5190 && multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
5191 && multiple_p (bitpos
, bitsize
)
5192 && multiple_p (bitsize
, GET_MODE_ALIGNMENT (mode1
))
5193 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
5195 to_rtx
= adjust_address (to_rtx
, mode1
, bytepos
);
5196 bitregion_start
= 0;
5197 if (known_ge (bitregion_end
, poly_uint64 (bitpos
)))
5198 bitregion_end
-= bitpos
;
5202 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5203 highest_pow2_factor_for_target (to
,
5207 /* No action is needed if the target is not a memory and the field
5208 lies completely outside that target. This can occur if the source
5209 code contains an out-of-bounds access to a small array. */
5211 && GET_MODE (to_rtx
) != BLKmode
5212 && known_ge (bitpos
, GET_MODE_PRECISION (GET_MODE (to_rtx
))))
5214 expand_normal (from
);
5217 /* Handle expand_expr of a complex value returning a CONCAT. */
5218 else if (GET_CODE (to_rtx
) == CONCAT
)
5220 machine_mode to_mode
= GET_MODE (to_rtx
);
5221 gcc_checking_assert (COMPLEX_MODE_P (to_mode
));
5222 poly_int64 mode_bitsize
= GET_MODE_BITSIZE (to_mode
);
5223 unsigned short inner_bitsize
= GET_MODE_UNIT_BITSIZE (to_mode
);
5224 if (TYPE_MODE (TREE_TYPE (from
)) == to_mode
5225 && known_eq (bitpos
, 0)
5226 && known_eq (bitsize
, mode_bitsize
))
5227 result
= store_expr (from
, to_rtx
, false, nontemporal
, reversep
);
5228 else if (TYPE_MODE (TREE_TYPE (from
)) == GET_MODE_INNER (to_mode
)
5229 && known_eq (bitsize
, inner_bitsize
)
5230 && (known_eq (bitpos
, 0)
5231 || known_eq (bitpos
, inner_bitsize
)))
5232 result
= store_expr (from
, XEXP (to_rtx
, maybe_ne (bitpos
, 0)),
5233 false, nontemporal
, reversep
);
5234 else if (known_le (bitpos
+ bitsize
, inner_bitsize
))
5235 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
5236 bitregion_start
, bitregion_end
,
5237 mode1
, from
, get_alias_set (to
),
5238 nontemporal
, reversep
);
5239 else if (known_ge (bitpos
, inner_bitsize
))
5240 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
5241 bitpos
- inner_bitsize
,
5242 bitregion_start
, bitregion_end
,
5243 mode1
, from
, get_alias_set (to
),
5244 nontemporal
, reversep
);
5245 else if (known_eq (bitpos
, 0) && known_eq (bitsize
, mode_bitsize
))
5247 result
= expand_normal (from
);
5248 if (GET_CODE (result
) == CONCAT
)
5250 to_mode
= GET_MODE_INNER (to_mode
);
5251 machine_mode from_mode
= GET_MODE_INNER (GET_MODE (result
));
5253 = simplify_gen_subreg (to_mode
, XEXP (result
, 0),
5256 = simplify_gen_subreg (to_mode
, XEXP (result
, 1),
5258 if (!from_real
|| !from_imag
)
5259 goto concat_store_slow
;
5260 emit_move_insn (XEXP (to_rtx
, 0), from_real
);
5261 emit_move_insn (XEXP (to_rtx
, 1), from_imag
);
5267 from_rtx
= change_address (result
, to_mode
, NULL_RTX
);
5270 = simplify_gen_subreg (to_mode
, result
,
5271 TYPE_MODE (TREE_TYPE (from
)), 0);
5274 emit_move_insn (XEXP (to_rtx
, 0),
5275 read_complex_part (from_rtx
, false));
5276 emit_move_insn (XEXP (to_rtx
, 1),
5277 read_complex_part (from_rtx
, true));
5281 machine_mode to_mode
5282 = GET_MODE_INNER (GET_MODE (to_rtx
));
5284 = simplify_gen_subreg (to_mode
, result
,
5285 TYPE_MODE (TREE_TYPE (from
)),
5288 = simplify_gen_subreg (to_mode
, result
,
5289 TYPE_MODE (TREE_TYPE (from
)),
5290 GET_MODE_SIZE (to_mode
));
5291 if (!from_real
|| !from_imag
)
5292 goto concat_store_slow
;
5293 emit_move_insn (XEXP (to_rtx
, 0), from_real
);
5294 emit_move_insn (XEXP (to_rtx
, 1), from_imag
);
5301 rtx temp
= assign_stack_temp (to_mode
,
5302 GET_MODE_SIZE (GET_MODE (to_rtx
)));
5303 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
5304 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
5305 result
= store_field (temp
, bitsize
, bitpos
,
5306 bitregion_start
, bitregion_end
,
5307 mode1
, from
, get_alias_set (to
),
5308 nontemporal
, reversep
);
5309 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
5310 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
5313 /* For calls to functions returning variable length structures, if TO_RTX
5314 is not a MEM, go through a MEM because we must not create temporaries
5316 else if (!MEM_P (to_rtx
)
5317 && TREE_CODE (from
) == CALL_EXPR
5318 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5319 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) != INTEGER_CST
)
5321 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
5322 GET_MODE_SIZE (GET_MODE (to_rtx
)));
5323 result
= store_field (temp
, bitsize
, bitpos
, bitregion_start
,
5324 bitregion_end
, mode1
, from
, get_alias_set (to
),
5325 nontemporal
, reversep
);
5326 emit_move_insn (to_rtx
, temp
);
5332 /* If the field is at offset zero, we could have been given the
5333 DECL_RTX of the parent struct. Don't munge it. */
5334 to_rtx
= shallow_copy_rtx (to_rtx
);
5335 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
5337 MEM_VOLATILE_P (to_rtx
) = 1;
5340 gcc_checking_assert (known_ge (bitpos
, 0));
5341 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
5342 bitregion_start
, bitregion_end
,
5343 mode1
, to_rtx
, to
, from
,
5347 result
= store_field (to_rtx
, bitsize
, bitpos
,
5348 bitregion_start
, bitregion_end
,
5349 mode1
, from
, get_alias_set (to
),
5350 nontemporal
, reversep
);
5354 preserve_temp_slots (result
);
5359 /* If the rhs is a function call and its value is not an aggregate,
5360 call the function before we start to compute the lhs.
5361 This is needed for correct code for cases such as
5362 val = setjmp (buf) on machines where reference to val
5363 requires loading up part of an address in a separate insn.
5365 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5366 since it might be a promoted variable where the zero- or sign- extension
5367 needs to be done. Handling this in the normal way is safe because no
5368 computation is done before the call. The same is true for SSA names. */
5369 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5370 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5371 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5373 || TREE_CODE (to
) == PARM_DECL
5374 || TREE_CODE (to
) == RESULT_DECL
)
5375 && REG_P (DECL_RTL (to
)))
5376 || TREE_CODE (to
) == SSA_NAME
))
5381 value
= expand_normal (from
);
5384 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5386 /* Handle calls that return values in multiple non-contiguous locations.
5387 The Irix 6 ABI has examples of this. */
5388 if (GET_CODE (to_rtx
) == PARALLEL
)
5390 if (GET_CODE (value
) == PARALLEL
)
5391 emit_group_move (to_rtx
, value
);
5393 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5394 int_size_in_bytes (TREE_TYPE (from
)));
5396 else if (GET_CODE (value
) == PARALLEL
)
5397 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5398 int_size_in_bytes (TREE_TYPE (from
)));
5399 else if (GET_MODE (to_rtx
) == BLKmode
)
5401 /* Handle calls that return BLKmode values in registers. */
5403 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5405 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5409 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5410 value
= convert_memory_address_addr_space
5411 (as_a
<scalar_int_mode
> (GET_MODE (to_rtx
)), value
,
5412 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5414 emit_move_insn (to_rtx
, value
);
5417 preserve_temp_slots (to_rtx
);
5422 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5423 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5425 /* Don't move directly into a return register. */
5426 if (TREE_CODE (to
) == RESULT_DECL
5427 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5433 /* If the source is itself a return value, it still is in a pseudo at
5434 this point so we can move it back to the return register directly. */
5436 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5437 && TREE_CODE (from
) != CALL_EXPR
)
5438 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5440 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5442 /* Handle calls that return values in multiple non-contiguous locations.
5443 The Irix 6 ABI has examples of this. */
5444 if (GET_CODE (to_rtx
) == PARALLEL
)
5446 if (GET_CODE (temp
) == PARALLEL
)
5447 emit_group_move (to_rtx
, temp
);
5449 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5450 int_size_in_bytes (TREE_TYPE (from
)));
5453 emit_move_insn (to_rtx
, temp
);
5455 preserve_temp_slots (to_rtx
);
5460 /* In case we are returning the contents of an object which overlaps
5461 the place the value is being stored, use a safe function when copying
5462 a value through a pointer into a structure value return block. */
5463 if (TREE_CODE (to
) == RESULT_DECL
5464 && TREE_CODE (from
) == INDIRECT_REF
5465 && ADDR_SPACE_GENERIC_P
5466 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5467 && refs_may_alias_p (to
, from
)
5468 && cfun
->returns_struct
5469 && !cfun
->returns_pcc_struct
)
5474 size
= expr_size (from
);
5475 from_rtx
= expand_normal (from
);
5477 emit_block_move_via_libcall (XEXP (to_rtx
, 0), XEXP (from_rtx
, 0), size
);
5479 preserve_temp_slots (to_rtx
);
5484 /* Compute FROM and store the value in the rtx we got. */
5487 result
= store_expr (from
, to_rtx
, 0, nontemporal
, false);
5488 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  class expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Helper function for store_expr storing of STRING_CST.  */

static rtx
string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
{
  tree str = (tree) data;

  gcc_assert (offset >= 0);
  if (offset >= TREE_STRING_LENGTH (str))
    return const0_rtx;

  if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
      > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
    {
      char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
      size_t l = TREE_STRING_LENGTH (str) - offset;
      memcpy (p, TREE_STRING_POINTER (str) + offset, l);
      memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
      return c_readstr (p, mode, false);
    }

  return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.  */
rtx
store_expr (tree exp, rtx target, int call_param_p,
	    bool nontemporal, bool reverse)
{
5557 rtx alt_rtl
= NULL_RTX
;
5558 location_t loc
= curr_insn_location ();
5560 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5562 /* C++ can generate ?: expressions with a throw expression in one
5563 branch and an rvalue in the other. Here, we resolve attempts to
5564 store the throw expression's nonexistent result. */
5565 gcc_assert (!call_param_p
);
5566 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5569 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5571 /* Perform first part of compound expression, then assign from second
5573 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5574 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5575 return store_expr (TREE_OPERAND (exp
, 1), target
,
5576 call_param_p
, nontemporal
, reverse
);
5578 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5580 /* For conditional expression, get safe form of the target. Then
5581 test the condition, doing the appropriate assignment on either
5582 side. This avoids the creation of unnecessary temporaries.
5583 For non-BLKmode, it is more efficient not to do this. */
5585 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5587 do_pending_stack_adjust ();
5589 jumpifnot (TREE_OPERAND (exp
, 0), lab1
,
5590 profile_probability::uninitialized ());
5591 store_expr (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5592 nontemporal
, reverse
);
5593 emit_jump_insn (targetm
.gen_jump (lab2
));
5596 store_expr (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5597 nontemporal
, reverse
);
5603 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5604 /* If this is a scalar in a register that is stored in a wider mode
5605 than the declared mode, compute the result into its declared mode
5606 and then convert to the wider mode. Our value is the computed
5609 rtx inner_target
= 0;
5610 scalar_int_mode outer_mode
= subreg_unpromoted_mode (target
);
5611 scalar_int_mode inner_mode
= subreg_promoted_mode (target
);
5613 /* We can do the conversion inside EXP, which will often result
5614 in some optimizations. Do the conversion in two steps: first
5615 change the signedness, if needed, then the extend. But don't
5616 do this if the type of EXP is a subtype of something else
5617 since then the conversion might involve more than just
5618 converting modes. */
5619 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5620 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5621 && GET_MODE_PRECISION (outer_mode
)
5622 == TYPE_PRECISION (TREE_TYPE (exp
)))
5624 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5625 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5627 /* Some types, e.g. Fortran's logical*4, won't have a signed
5628 version, so use the mode instead. */
5630 = (signed_or_unsigned_type_for
5631 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5633 ntype
= lang_hooks
.types
.type_for_mode
5634 (TYPE_MODE (TREE_TYPE (exp
)),
5635 SUBREG_PROMOTED_SIGN (target
));
5637 exp
= fold_convert_loc (loc
, ntype
, exp
);
5640 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5641 (inner_mode
, SUBREG_PROMOTED_SIGN (target
)),
5644 inner_target
= SUBREG_REG (target
);
5647 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5648 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5651 /* If TEMP is a VOIDmode constant, use convert_modes to make
5652 sure that we properly convert it. */
5653 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5655 temp
= convert_modes (outer_mode
, TYPE_MODE (TREE_TYPE (exp
)),
5656 temp
, SUBREG_PROMOTED_SIGN (target
));
5657 temp
= convert_modes (inner_mode
, outer_mode
, temp
,
5658 SUBREG_PROMOTED_SIGN (target
));
5661 convert_move (SUBREG_REG (target
), temp
,
5662 SUBREG_PROMOTED_SIGN (target
));
5666 else if ((TREE_CODE (exp
) == STRING_CST
5667 || (TREE_CODE (exp
) == MEM_REF
5668 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5669 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5671 && integer_zerop (TREE_OPERAND (exp
, 1))))
5672 && !nontemporal
&& !call_param_p
5675 /* Optimize initialization of an array with a STRING_CST. */
5676 HOST_WIDE_INT exp_len
, str_copy_len
;
5678 tree str
= TREE_CODE (exp
) == STRING_CST
5679 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5681 exp_len
= int_expr_size (exp
);
5685 if (TREE_STRING_LENGTH (str
) <= 0)
5688 if (can_store_by_pieces (exp_len
, string_cst_read_str
, (void *) str
,
5689 MEM_ALIGN (target
), false))
5691 store_by_pieces (target
, exp_len
, string_cst_read_str
, (void *) str
,
5692 MEM_ALIGN (target
), false, RETURN_BEGIN
);
5696 str_copy_len
= TREE_STRING_LENGTH (str
);
5697 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0)
5699 str_copy_len
+= STORE_MAX_PIECES
- 1;
5700 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5702 if (str_copy_len
>= exp_len
)
5705 if (!can_store_by_pieces (str_copy_len
, string_cst_read_str
,
5706 (void *) str
, MEM_ALIGN (target
), false))
5709 dest_mem
= store_by_pieces (target
, str_copy_len
, string_cst_read_str
,
5710 (void *) str
, MEM_ALIGN (target
), false,
5712 clear_storage (adjust_address_1 (dest_mem
, BLKmode
, 0, 1, 1, 0,
5713 exp_len
- str_copy_len
),
5714 GEN_INT (exp_len
- str_copy_len
), BLOCK_OP_NORMAL
);
5722 /* If we want to use a nontemporal or a reverse order store, force the
5723 value into a register first. */
5724 tmp_target
= nontemporal
|| reverse
? NULL_RTX
: target
;
5725 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5727 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5731 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5732 the same as that of TARGET, adjust the constant. This is needed, for
5733 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5734 only a word-sized value. */
5735 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5736 && TREE_CODE (exp
) != ERROR_MARK
5737 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5739 if (GET_MODE_CLASS (GET_MODE (target
))
5740 != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp
)))
5741 && known_eq (GET_MODE_BITSIZE (GET_MODE (target
)),
5742 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)))))
5744 rtx t
= simplify_gen_subreg (GET_MODE (target
), temp
,
5745 TYPE_MODE (TREE_TYPE (exp
)), 0);
5749 if (GET_MODE (temp
) == VOIDmode
)
5750 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5751 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5754 /* If value was not generated in the target, store it there.
5755 Convert the value to TARGET's type first if necessary and emit the
5756 pending incrementations that have been queued when expanding EXP.
5757 Note that we cannot emit the whole queue blindly because this will
5758 effectively disable the POST_INC optimization later.
5760 If TEMP and TARGET compare equal according to rtx_equal_p, but
5761 one or both of them are volatile memory refs, we have to distinguish
5763 - expand_expr has used TARGET. In this case, we must not generate
5764 another copy. This can be detected by TARGET being equal according
5766 - expand_expr has not used TARGET - that means that the source just
5767 happens to have the same RTX form. Since temp will have been created
5768 by expand_expr, it will compare unequal according to == .
5769 We must generate a copy in this case, to reach the correct number
5770 of volatile memory references. */
5772 if ((! rtx_equal_p (temp
, target
)
5773 || (temp
!= target
&& (side_effects_p (temp
)
5774 || side_effects_p (target
))))
5775 && TREE_CODE (exp
) != ERROR_MARK
5776 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5777 but TARGET is not valid memory reference, TEMP will differ
5778 from TARGET although it is really the same location. */
5780 && rtx_equal_p (alt_rtl
, target
)
5781 && !side_effects_p (alt_rtl
)
5782 && !side_effects_p (target
))
5783 /* If there's nothing to copy, don't bother. Don't call
5784 expr_size unless necessary, because some front-ends (C++)
5785 expr_size-hook must not be given objects that are not
5786 supposed to be bit-copied or bit-initialized. */
5787 && expr_size (exp
) != const0_rtx
)
5789 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5791 if (GET_MODE (target
) == BLKmode
)
5793 /* Handle calls that return BLKmode values in registers. */
5794 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5795 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5797 store_bit_field (target
,
5798 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5799 0, 0, 0, GET_MODE (temp
), temp
, reverse
);
5802 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5805 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5807 /* Handle copying a string constant into an array. The string
5808 constant may be shorter than the array. So copy just the string's
5809 actual length, and clear the rest. First get the size of the data
5810 type of the string, which is actually the size of the target. */
5811 rtx size
= expr_size (exp
);
5813 if (CONST_INT_P (size
)
5814 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5815 emit_block_move (target
, temp
, size
,
5817 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5820 machine_mode pointer_mode
5821 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5822 machine_mode address_mode
= get_address_mode (target
);
5824 /* Compute the size of the data to copy from the string. */
5826 = size_binop_loc (loc
, MIN_EXPR
,
5827 make_tree (sizetype
, size
),
5828 size_int (TREE_STRING_LENGTH (exp
)));
5830 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5832 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5833 rtx_code_label
*label
= 0;
5835 /* Copy that much. */
5836 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5837 TYPE_UNSIGNED (sizetype
));
5838 emit_block_move (target
, temp
, copy_size_rtx
,
5840 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5842 /* Figure out how much is left in TARGET that we have to clear.
5843 Do all calculations in pointer_mode. */
5844 poly_int64 const_copy_size
;
5845 if (poly_int_rtx_p (copy_size_rtx
, &const_copy_size
))
5847 size
= plus_constant (address_mode
, size
, -const_copy_size
);
5848 target
= adjust_address (target
, BLKmode
, const_copy_size
);
5852 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5853 copy_size_rtx
, NULL_RTX
, 0,
5856 if (GET_MODE (copy_size_rtx
) != address_mode
)
5857 copy_size_rtx
= convert_to_mode (address_mode
,
5859 TYPE_UNSIGNED (sizetype
));
5861 target
= offset_address (target
, copy_size_rtx
,
5862 highest_pow2_factor (copy_size
));
5863 label
= gen_label_rtx ();
5864 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5865 GET_MODE (size
), 0, label
);
5868 if (size
!= const0_rtx
)
5869 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5875 /* Handle calls that return values in multiple non-contiguous locations.
5876 The Irix 6 ABI has examples of this. */
5877 else if (GET_CODE (target
) == PARALLEL
)
5879 if (GET_CODE (temp
) == PARALLEL
)
5880 emit_group_move (target
, temp
);
5882 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5883 int_size_in_bytes (TREE_TYPE (exp
)));
5885 else if (GET_CODE (temp
) == PARALLEL
)
5886 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5887 int_size_in_bytes (TREE_TYPE (exp
)));
5888 else if (GET_MODE (temp
) == BLKmode
)
5889 emit_block_move (target
, temp
, expr_size (exp
),
5891 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5892 /* If we emit a nontemporal store, there is nothing else to do. */
5893 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5898 temp
= flip_storage_order (GET_MODE (target
), temp
);
5899 temp
= force_operand (temp
, target
);
5901 emit_move_insn (target
, temp
);
/* Return true if field F of structure TYPE is a flexible array.  */
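/* An illustrative sketch, not part of the compiler: the kind of field the
   predicate below is looking for -- a trailing array with no upper bound,
   i.e. a C99 flexible array member.  */
#if 0
struct sketch_flex
{
  int len;
  char data[];		/* last field, incomplete array type */
};
#endif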
static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5931 static HOST_WIDE_INT
5932 count_type_elements (const_tree type
, bool for_ctor_p
)
5934 switch (TREE_CODE (type
))
5940 nelts
= array_type_nelts (type
);
5941 if (nelts
&& tree_fits_uhwi_p (nelts
))
5943 unsigned HOST_WIDE_INT n
;
5945 n
= tree_to_uhwi (nelts
) + 1;
5946 if (n
== 0 || for_ctor_p
)
5949 return n
* count_type_elements (TREE_TYPE (type
), false);
5951 return for_ctor_p
? -1 : 1;
5956 unsigned HOST_WIDE_INT n
;
5960 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5961 if (TREE_CODE (f
) == FIELD_DECL
)
5964 n
+= count_type_elements (TREE_TYPE (f
), false);
5965 else if (!flexible_array_member_p (f
, type
))
5966 /* Don't count flexible arrays, which are not supposed
5967 to be initialized. */
5975 case QUAL_UNION_TYPE
:
5980 gcc_assert (!for_ctor_p
);
5981 /* Estimate the number of scalars in each field and pick the
5982 maximum. Other estimates would do instead; the idea is simply
5983 to make sure that the estimate is not sensitive to the ordering
5986 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5987 if (TREE_CODE (f
) == FIELD_DECL
)
5989 m
= count_type_elements (TREE_TYPE (f
), false);
5990 /* If the field doesn't span the whole union, add an extra
5991 scalar for the rest. */
5992 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5993 TYPE_SIZE (type
)) != 1)
6006 unsigned HOST_WIDE_INT nelts
;
6007 if (TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
6015 case FIXED_POINT_TYPE
:
6020 case REFERENCE_TYPE
:
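/* Worked example (illustrative only; the type below is hypothetical): for

     struct example_s { int a; float b[4]; };

   count_type_elements with !FOR_CTOR_P estimates 1 + 4 == 5 scalars, while
   with FOR_CTOR_P it reports 2, the number of top-level elements that a
   constructor needs in order to initialize the struct completely.  */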
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_unique_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  unique_nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
							   &ic, p_complete);

	    nz_elts += mult * nz;
	    unique_nz_elts += unz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  unique_nz_elts += TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    /* We can only construct constant-length vectors using
	       CONSTRUCTOR.  */
	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
	    for (unsigned int i = 0; i < nunits; ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  {
		    nz_elts += mult;
		    unique_nz_elts++;
		  }
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    unique_nz_elts += tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= (initializer_constant_valid_p (value, elt_type,
						 TYPE_REVERSE_STORAGE_ORDER
						 (TREE_TYPE (ctor)))
		   != NULL_TREE);
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_unique_nz_elts += unique_nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
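/* Worked example (illustrative, not from the original sources): for the
   initializer

     int a[100] = { [0 ... 49] = 7 };

   the single RANGE_EXPR element gets mult == 50, so nz_elts and init_elts
   each grow by 50 while unique_nz_elts grows only by 1.  This lets callers
   tell "one value repeated many times" (cheap to emit as a loop) apart from
   "many distinct nonzero values".  */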
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
     high - low + 1 (this can be useful for callers to determine ctors
     that could be cheaply initialized with - perhaps nested - loops
     compared to copied from huge read-only data),
     and place it in *P_UNIQUE_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT;
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_unique_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_unique_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
				     p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
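/* Worked example (illustrative): for

     static const int example_vec[8] = { 0, 0, 3, 0, 0, 0, 0, 0 };

   categorize_ctor_elements reports nz_elts == 1 and init_elts == 8, and
   since 1 < 8 / 4 the initializer counts as mostly zero, so callers such as
   store_constructor prefer to clear the whole object first and then store
   only the nonzero element.  */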
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
			 poly_uint64 bitregion_start,
			 poly_uint64 bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
      && maybe_ne (bitsize, 0U)
      && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (known_eq (bitpos, 0) || MEM_P (target)))
    {
      if (MEM_P (target))
	{
	  machine_mode target_mode = GET_MODE (target);
	  if (target_mode != BLKmode
	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
	    target_mode = BLKmode;
	  target = adjust_address (target, target_mode, bytepos);
	}

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bytesize, reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (known_eq (size, 0) || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (known_size_p (size)
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
	  {
	    clear_storage (target, gen_int_mode (size, Pmode),
			   BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      bitpos = int_bit_position (field);

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
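/* Worked example for the word-widening special case above (illustrative
   numbers only): with BITS_PER_WORD == 32, a constructor element that
   initializes an 8-bit field at bit position 0 with the constant 0x2a is
   widened to a full 32-bit store.  The value is first masked with
   wi::mask (8, false, 32) == 0xff, and on a big-endian target it is then
   shifted left by 32 - 8 == 24 bits so that the byte lands in the right
   part of the word.  */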
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && maybe_gt (size, 0))
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    poly_int64 bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode != BLKmode)
	      bitsize = GET_MODE_BITSIZE (mode);
	    else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
	      bitsize = -1;

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position
		      = fold_convert (ssizetype,
				      fold_build2 (MINUS_EXPR,
						   TREE_TYPE (index), index,
						   TYPE_MIN_VALUE (domain)));
		    position
		      = size_binop (MULT_EXPR, position,
				    fold_convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 exact_div (bitsize, BITS_PER_UNIT),
					 reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);
		else
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index), index,
						     TYPE_MIN_VALUE (domain)));

		position
		  = size_binop (MULT_EXPR, index,
				fold_convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	insn_code icode = CODE_FOR_nothing;
	tree elt;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	poly_uint64 n_elts;
	unsigned HOST_WIDE_INT const_n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;
	machine_mode mode = GET_MODE (target);

	gcc_assert (eltmode != BLKmode);

	/* Try using vec_duplicate_optab for uniform vectors.  */
	if (!TREE_SIDE_EFFECTS (exp)
	    && VECTOR_MODE_P (mode)
	    && eltmode == GET_MODE_INNER (mode)
	    && ((icode = optab_handler (vec_duplicate_optab, mode))
		!= CODE_FOR_nothing)
	    && (elt = uniform_vector_p (exp)))
	  {
	    class expand_operand ops[2];
	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
	    expand_insn (icode, 2, ops);
	    if (!rtx_equal_p (target, ops[0].value))
	      emit_move_insn (target, ops[0].value);
	    break;
	  }

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target)
	    && VECTOR_MODE_P (mode)
	    && n_elts.is_constant (&const_n_elts))
	  {
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
				      * TYPE_VECTOR_SUBPARTS (etype),
				      n_elts));
		emode = TYPE_MODE (etype);
	      }
	    icode = convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = const_n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (maybe_lt (count, n_elts)
			     || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && maybe_gt (size, 0) && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (mode));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (mode));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (mode, vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
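/* Usage note (illustrative; the vector typedef is hypothetical): for a
   uniform constructor such as

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = (v4si) { x, x, x, x };

   uniform_vector_p returns X and, when the target implements
   vec_duplicate_optab for V4SImode, the store above collapses into a single
   vec_duplicate instruction instead of a four-element vec_init sequence.  */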
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  Don't do that for zero sized addressable lhs of
     calls.  */
  if (known_eq (bitsize, 0)
      && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	  || TREE_CODE (exp) != CALL_EXPR))
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (known_eq (bitpos, 0));
      return store_expr (exp, target, 0, nontemporal, reverse);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  poly_int64 decl_bitsize;
  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
	      || !multiple_p (bitpos, BITS_PER_UNIT)))
      || (known_size_p (bitsize)
	  && mode != BLKmode
	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (known_size_p (bitsize)
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
		       bitsize)
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && multiple_p (bitsize, BITS_PER_UNIT))
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || !multiple_p (bitsize, BITS_PER_UNIT)
	      || !multiple_p (bitpos, BITS_PER_UNIT)
	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
				   &decl_bitsize)
	      || maybe_ne (decl_bitsize, bitsize)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (known_size_p (bitsize)
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C++ front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && maybe_ne (TYPE_PRECISION (type),
			   GET_MODE_BITSIZE (TYPE_MODE (type)))
	      && known_eq (bitsize, TYPE_PRECISION (type)))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type)
		  && known_ge (TYPE_PRECISION (type), bitsize))
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* We don't support variable-sized BLKmode bitfields, since our
	 handling of BLKmode is bound up with the ability to break
	 things into words.  */
      gcc_assert (mode != BLKmode || bitsize.is_constant ());

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  machine_mode temp_mode = GET_MODE (temp);
	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && is_int_mode (GET_MODE (temp), &temp_mode))
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (temp_mode, temp);

	  gcc_checking_assert (known_le (bitsize, size));
	  if (maybe_lt (bitsize, size)
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      /* Use of to_constant for BLKmode was checked above.  */
	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && multiple_p (bitpos, BITS_PER_UNIT)
		  && multiple_p (bitsize, BITS_PER_UNIT))))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp));
	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);

	  target = adjust_address (target, VOIDmode, bytepos);
	  emit_block_move (target, temp,
			   gen_int_mode (bytesize, Pmode),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
	{
	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      gcc_checking_assert (known_ge (bitpos, 0));
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }

  /* Now build a reference to just the desired component.  */
  rtx to_rtx = adjust_address (target, mode,
			       exact_div (bitpos, BITS_PER_UNIT));

  if (to_rtx == target)
    to_rtx = copy_rtx (to_rtx);

  if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
    set_mem_alias_set (to_rtx, alias_set);

  /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
     into a target smaller than its type; handle that case now.  */
  if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
    {
      poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
      store_constructor (exp, to_rtx, 0, bytesize, reverse);
      return to_rtx;
    }

  return store_expr (exp, to_rtx, 0, nontemporal, reverse);
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, poly_int64_pod *pbitsize,
		     poly_int64_pod *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  poly_offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   position.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	{
	  mode = DECL_MODE (field);
	  /* For vector fields re-check the target flags, as DECL_MODE
	     could have been set with different target flags than
	     the current function has.  */
	  if (mode == BLKmode
	      && VECTOR_TYPE_P (TREE_TYPE (field))
	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
	    mode = TYPE_MODE (TREE_TYPE (field));
	}
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector element types with the correct size of access or for
	 vector typed accesses use the mode of the access type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	   && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	   && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	  || VECTOR_TYPE_P (TREE_TYPE (exp)))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  poly_offset_int boff = mem_ref_offset (exp);
		  boff <<= LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (poly_int_tree_p (offset))
    {
      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
				      TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (tem.to_shwi (pbitpos))
	*poffset = offset = NULL_TREE;
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
	{
	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
	  offset = size_binop (PLUS_EXPR, offset,
			       build_int_cst (sizetype, bytes.force_shwi ()));
	}

      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && multiple_p (*pbitpos, BITS_PER_UNIT)
      && multiple_p (*pbitsize, BITS_PER_UNIT))
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
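/* Worked example (illustrative; the struct is hypothetical): for

     struct s { int pad; unsigned bits : 5; } x;

   get_inner_reference applied to the COMPONENT_REF x.bits hands back the
   decl for X with *PBITSIZE == 5, *PBITPOS == 32, a null *POFFSET, and
   *PMODE == VOIDmode because the field is a bit-field.  */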
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1),
						     NULL_RTX),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);

      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);

	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  return expand_divmod (0,
				FLOAT_MODE_P (GET_MODE (value))
				? RDIV_EXPR : TRUNC_DIV_EXPR,
				GET_MODE (value), op1, op2, target, 0);

	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);

	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);

	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);

	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);

	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }

  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
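/* Worked example (illustrative): for an index expression known to be
   i * 24, tree_ctz reports 3 trailing zero bits, so the function returns
   1 << 3 == 8; a MEM indexed by that expression may be assumed to be
   8-byte aligned (capped at BIGGEST_ALIGNMENT).  */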
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    default:
      gcc_unreachable ();
    }
  return code;
}
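/* Usage note (illustrative): a GIMPLE comparison such as LT_EXPR on an
   unsigned operand maps to the RTL code LTU, while the same tree code on a
   signed operand maps to LT; only the magnitude comparisons need this
   signedness adjustment.  */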
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
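/* Usage sketch (illustrative only; not a function from this file): a binary
   operation expander would typically do

     rtx op0, op1;
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		      subtarget, &op0, &op1, EXPAND_NORMAL);

   and then hand OP0/OP1 to expand_simple_binop; passing SUBTARGET is safe
   because expand_operands drops it when safe_from_p says the second operand
   might refer to it.  */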
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  poly_int64 bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at this point.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case TARGET_MEM_REF:
      return addr_for_mem_ref (exp, as, true);

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (maybe_ne (bitpos, 0))
    {
      /* Someone beforehand should have rejected taking the address
	 of an object that isn't byte-aligned.  */
      poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bytepos);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
8128 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
8129 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;
  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));
  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }
8152 /* We can get called with some Weird Things if the user does silliness
8153 like "(short) &a". In that case, convert_memory_address won't do
8154 the right thing, so ignore the given target mode. */
  scalar_int_mode new_tmode = (tmode == pointer_mode
			       ? pointer_mode
			       : address_mode);
  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    new_tmode, modifier, as);
  /* Despite expand_expr's claims about ignoring TMODE when not
     strictly convenient, things break if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
8174 /* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, then instead of creating a temporary variable in memory,
   NULL is returned and the caller needs to handle it differently.  */
8180 expand_constructor (tree exp
, rtx target
, enum expand_modifier modifier
,
8181 bool avoid_temp_mem
)
8183 tree type
= TREE_TYPE (exp
);
8184 machine_mode mode
= TYPE_MODE (type
);
8186 /* Try to avoid creating a temporary at all. This is possible
8187 if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
8190 if (TREE_STATIC (exp
)
8191 && !TREE_ADDRESSABLE (exp
)
8192 && target
!= 0 && mode
== BLKmode
8193 && all_zeros_p (exp
))
8195 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
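      /* As an example (hypothetical source): a compound literal such as

	     struct S { int a[16]; };
	     struct S s = (struct S) { 0 };

	 reaches this path with an all-zero CONSTRUCTOR; the clear_storage
	 call above is then the entire expansion, and typically turns into
	 a memset call or a block-clear pattern on the target.  */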
8199 /* All elts simple constants => refer to a constant in memory. But
8200 if this is a non-BLKmode mode, let it store a field at a time
8201 since that should make a CONST_INT, CONST_WIDE_INT or
8202 CONST_DOUBLE when we fold. Likewise, if we have a target we can
8203 use, it is best to store directly into the target unless the type
8204 is large enough that memcpy will be used. If we are making an
     initializer and all operands are constant, put it in memory as
     well.
8208 FIXME: Avoid trying to fill vector constructors piece-meal.
8209 Output them with output_constant_def below unless we're sure
8210 they're zeros. This should go away when vector initializers
8211 are treated like VECTOR_CST instead of arrays. */
8212 if ((TREE_STATIC (exp
)
8213 && ((mode
== BLKmode
8214 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
8215 || TREE_ADDRESSABLE (exp
)
8216 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
))
8217 && (! can_move_by_pieces
8218 (tree_to_uhwi (TYPE_SIZE_UNIT (type
)),
8220 && ! mostly_zeros_p (exp
))))
8221 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
8222 && TREE_CONSTANT (exp
)))
8229 constructor
= expand_expr_constant (exp
, 1, modifier
);
8231 if (modifier
!= EXPAND_CONST_ADDRESS
8232 && modifier
!= EXPAND_INITIALIZER
8233 && modifier
!= EXPAND_SUM
)
8234 constructor
= validize_mem (constructor
);
8239 /* Handle calls that pass values in multiple non-contiguous
8240 locations. The Irix 6 ABI has examples of this. */
8241 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
8242 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
8247 target
= assign_temp (type
, TREE_ADDRESSABLE (exp
), 1);
8250 store_constructor (exp
, target
, 0, int_expr_size (exp
), false);
8255 /* expand_expr: generate code for computing expression EXP.
8256 An rtx for the computed value is returned. The value is never null.
8257 In the case of a void EXP, const0_rtx is returned.
8259 The value may be stored in TARGET if TARGET is nonzero.
8260 TARGET is just a suggestion; callers must assume that
8261 the rtx returned may not be the same as TARGET.
8263 If TARGET is CONST0_RTX, it means that the value will be ignored.
8265 If TMODE is not VOIDmode, it suggests generating the
8266 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
8268 TMODE is just a suggestion; callers must assume that
8269 the rtx returned may not have mode TMODE.
8271 Note that TARGET may have neither TMODE nor MODE. In that case, it
8272 probably will not be used.
8274 If MODIFIER is EXPAND_SUM then when EXP is an addition
8275 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8276 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8277 products as above, or REG or MEM, or constant.
8278 Ordinarily in such cases we would output mul or add instructions
8279 and then return a pseudo reg containing the sum.
8281 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8282 it also marks a label as absolutely required (it can't be dead).
8283 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8284 This is used for outputting expressions used in initializers.
8286 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8287 with a constant address even if that address is not normally legitimate.
8288 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8290 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8291 a call parameter. Such targets require special care as we haven't yet
8292 marked TARGET so that it's safe from being trashed by libcalls. We
8293 don't want to use TARGET for anything but the final result;
8294 Intermediate values must go elsewhere. Additionally, calls to
8295 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8297 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8298 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8299 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If the result can be stored at TARGET, and ALT_RTL is non-NULL,
   then *ALT_RTL is set to TARGET (before legitimization).
8305 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8306 In this case, we don't adjust a returned MEM rtx that wouldn't be
8307 sufficiently aligned for its mode; instead, it's up to the caller
8308 to deal with it afterwards. This is used to make sure that unaligned
8309 base objects for which out-of-bounds accesses are supported, for
8310 example record types with trailing arrays, aren't realigned behind
8311 the back of the caller.
8312 The normal operating mode is to pass FALSE for this parameter. */
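/* As an illustration of the EXPAND_SUM contract (hypothetical source, and
   RTL that will vary by target): for an address computation like &arr[i]
   with 4-byte elements, the modifier permits returning a form such as

       (plus:SI (symbol_ref:SI "arr")
		(mult:SI (reg:SI 100) (const_int 4)))

   directly, leaving it to the caller to fold the whole expression into an
   addressing mode instead of forcing each term into a pseudo first.  */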
8315 expand_expr_real (tree exp
, rtx target
, machine_mode tmode
,
8316 enum expand_modifier modifier
, rtx
*alt_rtl
,
8317 bool inner_reference_p
)
8321 /* Handle ERROR_MARK before anybody tries to access its type. */
8322 if (TREE_CODE (exp
) == ERROR_MARK
8323 || (TREE_CODE (TREE_TYPE (exp
)) == ERROR_MARK
))
8325 ret
= CONST0_RTX (tmode
);
8326 return ret
? ret
: const0_rtx
;
8329 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
,
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */
8340 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED
,
8341 tree treeop1 ATTRIBUTE_UNUSED
,
8342 tree treeop2 ATTRIBUTE_UNUSED
)
8345 rtx op00
, op01
, op1
, op2
;
8346 enum rtx_code comparison_code
;
8347 machine_mode comparison_mode
;
8350 tree type
= TREE_TYPE (treeop1
);
8351 int unsignedp
= TYPE_UNSIGNED (type
);
8352 machine_mode mode
= TYPE_MODE (type
);
8353 machine_mode orig_mode
= mode
;
8354 static bool expanding_cond_expr_using_cmove
= false;
  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions, can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove, so avoid doing
     that by failing early if we end up recursing.  */
8361 if (expanding_cond_expr_using_cmove
)
8364 /* If we cannot do a conditional move on the mode, try doing it
8365 with the promoted mode. */
8366 if (!can_conditionally_move_p (mode
))
8368 mode
= promote_mode (type
, mode
, &unsignedp
);
8369 if (!can_conditionally_move_p (mode
))
8371 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
8374 temp
= assign_temp (type
, 0, 1);
8376 expanding_cond_expr_using_cmove
= true;
8378 expand_operands (treeop1
, treeop2
,
8379 temp
, &op1
, &op2
, EXPAND_NORMAL
);
8381 if (TREE_CODE (treeop0
) == SSA_NAME
8382 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
8384 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
8385 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
8386 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
8387 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
8388 comparison_mode
= TYPE_MODE (type
);
8389 unsignedp
= TYPE_UNSIGNED (type
);
8390 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8392 else if (COMPARISON_CLASS_P (treeop0
))
8394 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
8395 enum tree_code cmpcode
= TREE_CODE (treeop0
);
8396 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
8397 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
8398 unsignedp
= TYPE_UNSIGNED (type
);
8399 comparison_mode
= TYPE_MODE (type
);
8400 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8404 op00
= expand_normal (treeop0
);
8406 comparison_code
= NE
;
8407 comparison_mode
= GET_MODE (op00
);
8408 if (comparison_mode
== VOIDmode
)
8409 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8411 expanding_cond_expr_using_cmove
= false;
8413 if (GET_MODE (op1
) != mode
)
8414 op1
= gen_lowpart (mode
, op1
);
8416 if (GET_MODE (op2
) != mode
)
8417 op2
= gen_lowpart (mode
, op2
);
8419 /* Try to emit the conditional move. */
8420 insn
= emit_conditional_move (temp
, comparison_code
,
8421 op00
, op01
, comparison_mode
,
  /* If we could do the conditional move, emit the sequence,
     and return.  */
8429 rtx_insn
*seq
= get_insns ();
8432 return convert_modes (orig_mode
, mode
, temp
, 0);
  /* Otherwise discard the sequence and fall back to code with
     branches.  */
8441 /* A helper function for expand_expr_real_2 to be used with a
8442 misaligned mem_ref TEMP. Assume an unsigned type if UNSIGNEDP
8443 is nonzero, with alignment ALIGN in bits.
8444 Store the value at TARGET if possible (if TARGET is nonzero).
8445 Regardless of TARGET, we return the rtx for where the value is placed.
8446 If the result can be stored at TARGET, and ALT_RTL is non-NULL,
   then *ALT_RTL is set to TARGET (before legitimization).  */
8450 expand_misaligned_mem_ref (rtx temp
, machine_mode mode
, int unsignedp
,
8451 unsigned int align
, rtx target
, rtx
*alt_rtl
)
8453 enum insn_code icode
;
8455 if ((icode
= optab_handler (movmisalign_optab
, mode
))
8456 != CODE_FOR_nothing
)
8458 class expand_operand ops
[2];
8460 /* We've already validated the memory, and we're creating a
8461 new pseudo destination. The predicates really can't fail,
8462 nor can the generator. */
8463 create_output_operand (&ops
[0], NULL_RTX
, mode
);
8464 create_fixed_operand (&ops
[1], temp
);
8465 expand_insn (icode
, 2, ops
);
8466 temp
= ops
[0].value
;
8468 else if (targetm
.slow_unaligned_access (mode
, align
))
8469 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
8470 0, unsignedp
, target
,
8471 mode
, mode
, false, alt_rtl
);
8476 expand_expr_real_2 (sepops ops
, rtx target
, machine_mode tmode
,
8477 enum expand_modifier modifier
)
8479 rtx op0
, op1
, op2
, temp
;
8480 rtx_code_label
*lab
;
8484 scalar_int_mode int_mode
;
8485 enum tree_code code
= ops
->code
;
8487 rtx subtarget
, original_target
;
8489 bool reduce_bit_field
;
8490 location_t loc
= ops
->location
;
8491 tree treeop0
, treeop1
, treeop2
;
8492 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8493 ? reduce_to_bit_field_precision ((expr), \
8499 mode
= TYPE_MODE (type
);
8500 unsignedp
= TYPE_UNSIGNED (type
);
8506 /* We should be called only on simple (binary or unary) expressions,
8507 exactly those that are valid in gimple expressions that aren't
8508 GIMPLE_SINGLE_RHS (or invalid). */
8509 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8510 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8511 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8513 ignore
= (target
== const0_rtx
8514 || ((CONVERT_EXPR_CODE_P (code
)
8515 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8516 && TREE_CODE (type
) == VOID_TYPE
));
8518 /* We should be called only if we need the result. */
8519 gcc_assert (!ignore
);
8521 /* An operation in what may be a bit-field type needs the
8522 result to be reduced to the precision of the bit-field type,
8523 which is narrower than that of the type's mode. */
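  /* For instance (hypothetical type): for a bit-field type with 3-bit
     precision carried in an SImode word, an addition such as 7 + 1 must
     not be left as the full SImode value 8; REDUCE_BIT_FIELD masks (for
     an unsigned type) or sign-extends (for a signed type) the result
     back down to 3 bits so later uses see the wrapped value.  */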
8524 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8525 && !type_has_mode_precision_p (type
));
8527 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8530 /* Use subtarget as the target for operand 0 of a binary operation. */
8531 subtarget
= get_subtarget (target
);
8532 original_target
= target
;
8536 case NON_LVALUE_EXPR
:
8539 if (treeop0
== error_mark_node
)
8542 if (TREE_CODE (type
) == UNION_TYPE
)
8544 tree valtype
= TREE_TYPE (treeop0
);
8546 /* If both input and output are BLKmode, this conversion isn't doing
8547 anything except possibly changing memory attribute. */
8548 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8550 rtx result
= expand_expr (treeop0
, target
, tmode
,
8553 result
= copy_rtx (result
);
8554 set_mem_attributes (result
, type
, 0);
8560 if (TYPE_MODE (type
) != BLKmode
)
8561 target
= gen_reg_rtx (TYPE_MODE (type
));
8563 target
= assign_temp (type
, 1, 1);
8567 /* Store data into beginning of memory target. */
8568 store_expr (treeop0
,
8569 adjust_address (target
, TYPE_MODE (valtype
), 0),
8570 modifier
== EXPAND_STACK_PARM
,
8571 false, TYPE_REVERSE_STORAGE_ORDER (type
));
8575 gcc_assert (REG_P (target
)
8576 && !TYPE_REVERSE_STORAGE_ORDER (type
));
8578 /* Store this field into a union of the proper type. */
8579 poly_uint64 op0_size
8580 = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0
)));
8581 poly_uint64 union_size
= GET_MODE_BITSIZE (mode
);
8582 store_field (target
,
		     /* The conversion must be constructed so that
			we know at compile time how many bits
			to preserve.  */
8586 ordered_min (op0_size
, union_size
),
8587 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0,
8591 /* Return the entire union. */
8595 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8597 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8600 /* If the signedness of the conversion differs and OP0 is
8601 a promoted SUBREG, clear that indication since we now
8602 have to do the proper extension. */
8603 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8604 && GET_CODE (op0
) == SUBREG
)
8605 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8607 return REDUCE_BIT_FIELD (op0
);
8610 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8611 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8612 if (GET_MODE (op0
) == mode
)
8615 /* If OP0 is a constant, just convert it into the proper mode. */
8616 else if (CONSTANT_P (op0
))
8618 tree inner_type
= TREE_TYPE (treeop0
);
8619 machine_mode inner_mode
= GET_MODE (op0
);
8621 if (inner_mode
== VOIDmode
)
8622 inner_mode
= TYPE_MODE (inner_type
);
8624 if (modifier
== EXPAND_INITIALIZER
)
8625 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
8627 op0
= convert_modes (mode
, inner_mode
, op0
,
8628 TYPE_UNSIGNED (inner_type
));
8631 else if (modifier
== EXPAND_INITIALIZER
)
8632 op0
= gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8633 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8635 else if (target
== 0)
8636 op0
= convert_to_mode (mode
, op0
,
8637 TYPE_UNSIGNED (TREE_TYPE
8641 convert_move (target
, op0
,
8642 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8646 return REDUCE_BIT_FIELD (op0
);
8648 case ADDR_SPACE_CONVERT_EXPR
:
8650 tree treeop0_type
= TREE_TYPE (treeop0
);
8652 gcc_assert (POINTER_TYPE_P (type
));
8653 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8655 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8656 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8658 /* Conversions between pointers to the same address space should
8659 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8660 gcc_assert (as_to
!= as_from
);
8662 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8664 /* Ask target code to handle conversion between pointers
8665 to overlapping address spaces. */
8666 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8667 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8669 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8673 /* For disjoint address spaces, converting anything but a null
8674 pointer invokes undefined behavior. We truncate or extend the
8675 value as if we'd converted via integers, which handles 0 as
8676 required, and all others as the programmer likely expects. */
8677 #ifndef POINTERS_EXTEND_UNSIGNED
8678 const int POINTERS_EXTEND_UNSIGNED
= 1;
8680 op0
= convert_modes (mode
, TYPE_MODE (treeop0_type
),
8681 op0
, POINTERS_EXTEND_UNSIGNED
);
8687 case POINTER_PLUS_EXPR
:
      /* Even though the sizetype mode and the pointer's mode can be
	 different, expand is able to handle this correctly and get the
	 correct result out of the PLUS_EXPR code.  */
8691 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8692 if sizetype precision is smaller than pointer precision. */
8693 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
8694 treeop1
= fold_convert_loc (loc
, type
,
8695 fold_convert_loc (loc
, ssizetype
,
8697 /* If sizetype precision is larger than pointer precision, truncate the
8698 offset to have matching modes. */
8699 else if (TYPE_PRECISION (sizetype
) > TYPE_PRECISION (type
))
8700 treeop1
= fold_convert_loc (loc
, type
, treeop1
);
8704 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8705 something else, make sure we add the register to the constant and
8706 then to the other thing. This case can occur during strength
8707 reduction and doing it this way will produce better code if the
8708 frame pointer or argument pointer is eliminated.
8710 fold-const.c will ensure that the constant is always in the inner
8711 PLUS_EXPR, so the only case we need to do anything about is if
8712 sp, ap, or fp is our second argument, in which case we must swap
8713 the innermost first argument and our second argument. */
8715 if (TREE_CODE (treeop0
) == PLUS_EXPR
8716 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8718 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8719 || DECL_RTL (treeop1
) == stack_pointer_rtx
8720 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8725 /* If the result is to be ptr_mode and we are adding an integer to
8726 something, we might be forming a constant. So try to use
8727 plus_constant. If it produces a sum and we can't accept it,
8728 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.
8732 If this is an EXPAND_SUM call, always return the sum. */
8733 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8734 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8736 if (modifier
== EXPAND_STACK_PARM
)
8738 if (TREE_CODE (treeop0
) == INTEGER_CST
8739 && HWI_COMPUTABLE_MODE_P (mode
)
8740 && TREE_CONSTANT (treeop1
))
8744 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8746 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8748 /* Use wi::shwi to ensure that the constant is
8749 truncated according to the mode of OP1, then sign extended
8750 to a HOST_WIDE_INT. Using the constant directly can result
8751 in non-canonical RTL in a 64x32 cross compile. */
8752 wc
= TREE_INT_CST_LOW (treeop0
);
8754 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8755 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8756 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8757 op1
= force_operand (op1
, target
);
8758 return REDUCE_BIT_FIELD (op1
);
8761 else if (TREE_CODE (treeop1
) == INTEGER_CST
8762 && HWI_COMPUTABLE_MODE_P (mode
)
8763 && TREE_CONSTANT (treeop0
))
8767 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8769 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8770 (modifier
== EXPAND_INITIALIZER
8771 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8772 if (! CONSTANT_P (op0
))
8774 op1
= expand_expr (treeop1
, NULL_RTX
,
8775 VOIDmode
, modifier
);
8776 /* Return a PLUS if modifier says it's OK. */
8777 if (modifier
== EXPAND_SUM
8778 || modifier
== EXPAND_INITIALIZER
)
8779 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8782 /* Use wi::shwi to ensure that the constant is
8783 truncated according to the mode of OP1, then sign extended
8784 to a HOST_WIDE_INT. Using the constant directly can result
8785 in non-canonical RTL in a 64x32 cross compile. */
8786 wc
= TREE_INT_CST_LOW (treeop1
);
8788 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8789 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8790 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8791 op0
= force_operand (op0
, target
);
8792 return REDUCE_BIT_FIELD (op0
);
8796 /* Use TER to expand pointer addition of a negated value
8797 as pointer subtraction. */
8798 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8799 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8800 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8801 && TREE_CODE (treeop1
) == SSA_NAME
8802 && TYPE_MODE (TREE_TYPE (treeop0
))
8803 == TYPE_MODE (TREE_TYPE (treeop1
)))
8805 gimple
*def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8808 treeop1
= gimple_assign_rhs1 (def
);
8814 /* No sense saving up arithmetic to be done
8815 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8818 if (modifier
!= EXPAND_INITIALIZER
8819 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8821 expand_operands (treeop0
, treeop1
,
8822 subtarget
, &op0
, &op1
, modifier
);
8823 if (op0
== const0_rtx
)
8825 if (op1
== const0_rtx
)
8830 expand_operands (treeop0
, treeop1
,
8831 subtarget
, &op0
, &op1
, modifier
);
8832 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8835 case POINTER_DIFF_EXPR
:
8837 /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
8840 /* Handle difference of two symbolic constants,
8841 for the sake of an initializer. */
8842 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8843 && really_constant_p (treeop0
)
8844 && really_constant_p (treeop1
))
8846 expand_operands (treeop0
, treeop1
,
8847 NULL_RTX
, &op0
, &op1
, modifier
);
8848 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
8851 /* No sense saving up arithmetic to be done
8852 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8855 if (modifier
!= EXPAND_INITIALIZER
8856 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8859 expand_operands (treeop0
, treeop1
,
8860 subtarget
, &op0
, &op1
, modifier
);
8862 /* Convert A - const to A + (-const). */
8863 if (CONST_INT_P (op1
))
8865 op1
= negate_rtx (mode
, op1
);
8866 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8871 case WIDEN_MULT_PLUS_EXPR
:
8872 case WIDEN_MULT_MINUS_EXPR
:
8873 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8874 op2
= expand_normal (treeop2
);
8875 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8879 case WIDEN_MULT_EXPR
:
8880 /* If first operand is constant, swap them.
8881 Thus the following special case checks need only
8882 check the second operand. */
8883 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8884 std::swap (treeop0
, treeop1
);
8886 /* First, check if we have a multiplication of one signed and one
8887 unsigned operand. */
8888 if (TREE_CODE (treeop1
) != INTEGER_CST
8889 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8890 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8892 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8893 this_optab
= usmul_widen_optab
;
8894 if (find_widening_optab_handler (this_optab
, mode
, innermode
)
8895 != CODE_FOR_nothing
)
8897 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8898 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8901 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8903 /* op0 and op1 might still be constant, despite the above
8904 != INTEGER_CST check. Handle it. */
8905 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8907 op0
= convert_modes (mode
, innermode
, op0
, true);
8908 op1
= convert_modes (mode
, innermode
, op1
, false);
8909 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8910 target
, unsignedp
));
8915 /* Check for a multiplication with matching signedness. */
8916 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8917 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8918 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8919 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8921 tree op0type
= TREE_TYPE (treeop0
);
8922 machine_mode innermode
= TYPE_MODE (op0type
);
8923 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8924 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8925 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8927 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8929 if (find_widening_optab_handler (this_optab
, mode
, innermode
)
8930 != CODE_FOR_nothing
)
8932 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8934 /* op0 and op1 might still be constant, despite the above
8935 != INTEGER_CST check. Handle it. */
8936 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8939 op0
= convert_modes (mode
, innermode
, op0
, zextend_p
);
8941 = convert_modes (mode
, innermode
, op1
,
8942 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8943 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8947 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8948 unsignedp
, this_optab
);
8949 return REDUCE_BIT_FIELD (temp
);
8951 if (find_widening_optab_handler (other_optab
, mode
, innermode
)
8953 && innermode
== word_mode
)
8956 op0
= expand_normal (treeop0
);
8957 op1
= expand_normal (treeop1
);
8958 /* op0 and op1 might be constants, despite the above
8959 != INTEGER_CST check. Handle it. */
8960 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8961 goto widen_mult_const
;
8962 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8963 op1
= convert_modes (mode
, word_mode
, op1
,
8964 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8965 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8966 unsignedp
, OPTAB_LIB_WIDEN
);
8967 hipart
= gen_highpart (word_mode
, temp
);
8968 htem
= expand_mult_highpart_adjust (word_mode
, hipart
,
8972 emit_move_insn (hipart
, htem
);
8973 return REDUCE_BIT_FIELD (temp
);
8977 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8978 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8979 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8980 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8983 /* If this is a fixed-point operation, then we cannot use the code
     below because "expand_mult" doesn't support sat/no-sat fixed-point
     multiplications.  */
8986 if (ALL_FIXED_POINT_MODE_P (mode
))
8989 /* If first operand is constant, swap them.
8990 Thus the following special case checks need only
8991 check the second operand. */
8992 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8993 std::swap (treeop0
, treeop1
);
8995 /* Attempt to return something suitable for generating an
8996 indexed address, for machines that support that. */
8998 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8999 && tree_fits_shwi_p (treeop1
))
9001 tree exp1
= treeop1
;
9003 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
9007 op0
= force_operand (op0
, NULL_RTX
);
9009 op0
= copy_to_mode_reg (mode
, op0
);
9011 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
9012 gen_int_mode (tree_to_shwi (exp1
),
9013 TYPE_MODE (TREE_TYPE (exp1
)))));
9016 if (modifier
== EXPAND_STACK_PARM
)
9019 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9020 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
9022 case TRUNC_MOD_EXPR
:
9023 case FLOOR_MOD_EXPR
:
9025 case ROUND_MOD_EXPR
:
9027 case TRUNC_DIV_EXPR
:
9028 case FLOOR_DIV_EXPR
:
9030 case ROUND_DIV_EXPR
:
9031 case EXACT_DIV_EXPR
:
9033 /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
9036 if (ALL_FIXED_POINT_MODE_P (mode
))
9039 if (modifier
== EXPAND_STACK_PARM
)
9041 /* Possible optimization: compute the dividend with EXPAND_SUM
9042 then if the divisor is constant can optimize the case
9043 where some terms of the dividend have coeffs divisible by it. */
9044 expand_operands (treeop0
, treeop1
,
9045 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9046 bool mod_p
= code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
9047 || code
== CEIL_MOD_EXPR
|| code
== ROUND_MOD_EXPR
;
9048 if (SCALAR_INT_MODE_P (mode
)
9050 && get_range_pos_neg (treeop0
) == 1
9051 && get_range_pos_neg (treeop1
) == 1)
9053 /* If both arguments are known to be positive when interpreted
9054 as signed, we can expand it as both signed and unsigned
9055 division or modulo. Choose the cheaper sequence in that case. */
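	  /* For instance (hypothetical expression): for x / 16 where x is
	     signed but known non-negative, the unsigned sequence is a
	     single logical shift, while the signed sequence needs the
	     usual sign-correcting adjustments; comparing the costs of the
	     two sequences below picks the cheaper one.  */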
9056 bool speed_p
= optimize_insn_for_speed_p ();
9057 do_pending_stack_adjust ();
9059 rtx uns_ret
= expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, 1);
9060 rtx_insn
*uns_insns
= get_insns ();
9063 rtx sgn_ret
= expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, 0);
9064 rtx_insn
*sgn_insns
= get_insns ();
9066 unsigned uns_cost
= seq_cost (uns_insns
, speed_p
);
9067 unsigned sgn_cost
= seq_cost (sgn_insns
, speed_p
);
	  /* If costs are the same then use the other kind of cost as
	     a tie breaker.  */
9071 if (uns_cost
== sgn_cost
)
9073 uns_cost
= seq_cost (uns_insns
, !speed_p
);
9074 sgn_cost
= seq_cost (sgn_insns
, !speed_p
);
9077 if (uns_cost
< sgn_cost
|| (uns_cost
== sgn_cost
&& unsignedp
))
9079 emit_insn (uns_insns
);
9082 emit_insn (sgn_insns
);
9085 return expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, unsignedp
);
9090 case MULT_HIGHPART_EXPR
:
9091 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9092 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
9096 case FIXED_CONVERT_EXPR
:
9097 op0
= expand_normal (treeop0
);
9098 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
9099 target
= gen_reg_rtx (mode
);
9101 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
9102 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
9103 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
9104 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
9106 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
9109 case FIX_TRUNC_EXPR
:
9110 op0
= expand_normal (treeop0
);
9111 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
9112 target
= gen_reg_rtx (mode
);
9113 expand_fix (target
, op0
, unsignedp
);
9117 op0
= expand_normal (treeop0
);
9118 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
9119 target
= gen_reg_rtx (mode
);
9120 /* expand_float can't figure out what to do if FROM has VOIDmode.
9121 So give it the correct mode. With -O, cse will optimize this. */
9122 if (GET_MODE (op0
) == VOIDmode
)
9123 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
9125 expand_float (target
, op0
,
9126 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9130 op0
= expand_expr (treeop0
, subtarget
,
9131 VOIDmode
, EXPAND_NORMAL
);
9132 if (modifier
== EXPAND_STACK_PARM
)
9134 temp
= expand_unop (mode
,
9135 optab_for_tree_code (NEGATE_EXPR
, type
,
9139 return REDUCE_BIT_FIELD (temp
);
9143 op0
= expand_expr (treeop0
, subtarget
,
9144 VOIDmode
, EXPAND_NORMAL
);
9145 if (modifier
== EXPAND_STACK_PARM
)
9148 /* ABS_EXPR is not valid for complex arguments. */
9149 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
9150 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
9152 /* Unsigned abs is simply the operand. Testing here means we don't
9153 risk generating incorrect code below. */
9154 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
9157 return expand_abs (mode
, op0
, target
, unsignedp
,
9158 safe_from_p (target
, treeop0
, 1));
9162 target
= original_target
;
9164 || modifier
== EXPAND_STACK_PARM
9165 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
9166 || GET_MODE (target
) != mode
9168 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
9169 target
= gen_reg_rtx (mode
);
9170 expand_operands (treeop0
, treeop1
,
9171 target
, &op0
, &op1
, EXPAND_NORMAL
);
9173 /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
9176 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9177 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
9183 and similarly for MAX <x, y>. */
9184 if (VECTOR_TYPE_P (type
))
9186 tree t0
= make_tree (type
, op0
);
9187 tree t1
= make_tree (type
, op1
);
9188 tree comparison
= build2 (code
== MIN_EXPR
? LE_EXPR
: GE_EXPR
,
9190 return expand_vec_cond_expr (type
, comparison
, t0
, t1
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
9197 if (! REG_P (target
))
9198 target
= gen_reg_rtx (mode
);
9200 /* If op1 was placed in target, swap op0 and op1. */
9201 if (target
!= op0
&& target
== op1
)
9202 std::swap (op0
, op1
);
9204 /* We generate better code and avoid problems with op1 mentioning
9205 target by forcing op1 into a pseudo if it isn't a constant. */
9206 if (! CONSTANT_P (op1
))
9207 op1
= force_reg (mode
, op1
);
9210 enum rtx_code comparison_code
;
9213 if (code
== MAX_EXPR
)
9214 comparison_code
= unsignedp
? GEU
: GE
;
9216 comparison_code
= unsignedp
? LEU
: LE
;
9218 /* Canonicalize to comparisons against 0. */
9219 if (op1
== const1_rtx
)
9221 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9222 or (a != 0 ? a : 1) for unsigned.
9223 For MIN we are safe converting (a <= 1 ? a : 1)
9224 into (a <= 0 ? a : 1) */
9225 cmpop1
= const0_rtx
;
9226 if (code
== MAX_EXPR
)
9227 comparison_code
= unsignedp
? NE
: GT
;
9229 if (op1
== constm1_rtx
&& !unsignedp
)
9231 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9232 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9233 cmpop1
= const0_rtx
;
9234 if (code
== MIN_EXPR
)
9235 comparison_code
= LT
;
9238 /* Use a conditional move if possible. */
9239 if (can_conditionally_move_p (mode
))
9245 /* Try to emit the conditional move. */
9246 insn
= emit_conditional_move (target
, comparison_code
,
	  /* If we could do the conditional move, emit the sequence,
	     and return.  */
9255 rtx_insn
*seq
= get_insns ();
	  /* Otherwise discard the sequence and fall back to code with
	     branches.  */
9267 emit_move_insn (target
, op0
);
9269 lab
= gen_label_rtx ();
9270 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
9271 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
9272 profile_probability::uninitialized ());
9274 emit_move_insn (target
, op1
);
9279 op0
= expand_expr (treeop0
, subtarget
,
9280 VOIDmode
, EXPAND_NORMAL
);
9281 if (modifier
== EXPAND_STACK_PARM
)
      /* In case we have to reduce the result to bitfield precision
	 for an unsigned bitfield, expand this as XOR with a proper
	 constant instead.  */
9286 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
9288 int_mode
= SCALAR_INT_TYPE_MODE (type
);
9289 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
9290 false, GET_MODE_PRECISION (int_mode
));
9292 temp
= expand_binop (int_mode
, xor_optab
, op0
,
9293 immed_wide_int_const (mask
, int_mode
),
9294 target
, 1, OPTAB_LIB_WIDEN
);
9297 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
9301 /* ??? Can optimize bitwise operations with one arg constant.
9302 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9303 and (a bitwise1 b) bitwise2 b (etc)
9304 but that is probably not worth while. */
9313 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
9314 || type_has_mode_precision_p (type
));
9320 /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
9323 if (ALL_FIXED_POINT_MODE_P (mode
))
9326 if (! safe_from_p (subtarget
, treeop1
, 1))
9328 if (modifier
== EXPAND_STACK_PARM
)
9330 op0
= expand_expr (treeop0
, subtarget
,
9331 VOIDmode
, EXPAND_NORMAL
);
      /* Left shift optimization when shifting across the word_size boundary.

	 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	 there is no native instruction for a left shift in this wide
	 mode.  Consider the following scenario:

	     Type A = (Type) B << C

	     | dest_high  |  dest_low  |

	 If the shift amount C causes B to be shifted across the word
	 size boundary, i.e. part of B is shifted into the high half of
	 the destination register while part of B remains in the low
	 half, then GCC expands the left shift as follows:

	   1. Initialize dest_low to B.
	   2. Initialize every bit of dest_high to the sign bit of B.
	   3. Logically left shift dest_low by C bits to finalize dest_low.
	      The value of dest_low before this shift is kept in a temp D.
	   4. Logically left shift dest_high by C.
	   5. Logically right shift D by (word_size - C).
	   6. Or the results of 4 and 5 to finalize dest_high.

	 However, by checking the gimple statements, if operand B comes
	 from a sign extension, then we can simplify the above into:

	   1. dest_high = src_low >> (word_size - C).
	   2. dest_low = src_low << C.

	 One arithmetic right shift does the work of steps 2, 4, 5
	 and 6, so the number of steps needed drops from 6 to 2.

	 The case is similar for zero extension, except that we
	 initialize dest_high to zero rather than to copies of the sign
	 bit of B.  Furthermore, we need to use a logical right shift
	 in this case.

	 The choice of sign-extension versus zero-extension is
	 determined entirely by whether or not B is signed and is
	 independent of the current setting of unsignedp.  */
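      /* As a concrete (hypothetical) illustration on a 64-bit target:
	 for T = (__int128) (int) b << 40 the operand is a 32-bit value
	 sign-extended into the low word, so the simplified form is

	     dest_high = src_low >> (64 - 40);    (arithmetic shift)
	     dest_low  = src_low << 40;

	 rather than the six-step sequence described above.  */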
9381 if (code
== LSHIFT_EXPR
9384 && GET_MODE_2XWIDER_MODE (word_mode
).exists (&int_mode
)
9386 && TREE_CONSTANT (treeop1
)
9387 && TREE_CODE (treeop0
) == SSA_NAME
)
9389 gimple
*def
= SSA_NAME_DEF_STMT (treeop0
);
9390 if (is_gimple_assign (def
)
9391 && gimple_assign_rhs_code (def
) == NOP_EXPR
)
9393 scalar_int_mode rmode
= SCALAR_INT_TYPE_MODE
9394 (TREE_TYPE (gimple_assign_rhs1 (def
)));
9396 if (GET_MODE_SIZE (rmode
) < GET_MODE_SIZE (int_mode
)
9397 && TREE_INT_CST_LOW (treeop1
) < GET_MODE_BITSIZE (word_mode
)
9398 && ((TREE_INT_CST_LOW (treeop1
) + GET_MODE_BITSIZE (rmode
))
9399 >= GET_MODE_BITSIZE (word_mode
)))
9401 rtx_insn
*seq
, *seq_old
;
9402 poly_uint64 high_off
= subreg_highpart_offset (word_mode
,
9404 bool extend_unsigned
9405 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def
)));
9406 rtx low
= lowpart_subreg (word_mode
, op0
, int_mode
);
9407 rtx dest_low
= lowpart_subreg (word_mode
, target
, int_mode
);
9408 rtx dest_high
= simplify_gen_subreg (word_mode
, target
,
9409 int_mode
, high_off
);
9410 HOST_WIDE_INT ramount
= (BITS_PER_WORD
9411 - TREE_INT_CST_LOW (treeop1
));
9412 tree rshift
= build_int_cst (TREE_TYPE (treeop1
), ramount
);
9415 /* dest_high = src_low >> (word_size - C). */
9416 temp
= expand_variable_shift (RSHIFT_EXPR
, word_mode
, low
,
9419 if (temp
!= dest_high
)
9420 emit_move_insn (dest_high
, temp
);
9422 /* dest_low = src_low << C. */
9423 temp
= expand_variable_shift (LSHIFT_EXPR
, word_mode
, low
,
9424 treeop1
, dest_low
, unsignedp
);
9425 if (temp
!= dest_low
)
9426 emit_move_insn (dest_low
, temp
);
9432 if (have_insn_for (ASHIFT
, int_mode
))
9434 bool speed_p
= optimize_insn_for_speed_p ();
9436 rtx ret_old
= expand_variable_shift (code
, int_mode
,
9441 seq_old
= get_insns ();
9443 if (seq_cost (seq
, speed_p
)
9444 >= seq_cost (seq_old
, speed_p
))
9455 if (temp
== NULL_RTX
)
9456 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
9458 if (code
== LSHIFT_EXPR
)
9459 temp
= REDUCE_BIT_FIELD (temp
);
9463 /* Could determine the answer when only additive constants differ. Also,
9464 the addition of one can be handled by changing the condition. */
9471 case UNORDERED_EXPR
:
9480 temp
= do_store_flag (ops
,
9481 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
9482 tmode
!= VOIDmode
? tmode
: mode
);
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if have_canonicalize_funcptr_for_compare.  */
9490 || modifier
== EXPAND_STACK_PARM
9491 || ! safe_from_p (target
, treeop0
, 1)
9492 || ! safe_from_p (target
, treeop1
, 1)
9493 /* Make sure we don't have a hard reg (such as function's return
9494 value) live across basic blocks, if not optimizing. */
9495 || (!optimize
&& REG_P (target
)
9496 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
9497 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
9499 emit_move_insn (target
, const0_rtx
);
9501 rtx_code_label
*lab1
= gen_label_rtx ();
9502 jumpifnot_1 (code
, treeop0
, treeop1
, lab1
,
9503 profile_probability::uninitialized ());
9505 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
9506 emit_move_insn (target
, constm1_rtx
);
9508 emit_move_insn (target
, const1_rtx
);
9514 /* Get the rtx code of the operands. */
9515 op0
= expand_normal (treeop0
);
9516 op1
= expand_normal (treeop1
);
9519 target
= gen_reg_rtx (TYPE_MODE (type
));
9521 /* If target overlaps with op1, then either we need to force
9522 op1 into a pseudo (if target also overlaps with op0),
9523 or write the complex parts in reverse order. */
9524 switch (GET_CODE (target
))
9527 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
9529 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
9531 complex_expr_force_op1
:
9532 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
9533 emit_move_insn (temp
, op1
);
9537 complex_expr_swap_order
:
	  /* Move the imaginary (op1) and real (op0) parts to their
	     location.  */
9540 write_complex_part (target
, op1
, true);
9541 write_complex_part (target
, op0
, false);
9547 temp
= adjust_address_nv (target
,
9548 GET_MODE_INNER (GET_MODE (target
)), 0);
9549 if (reg_overlap_mentioned_p (temp
, op1
))
9551 scalar_mode imode
= GET_MODE_INNER (GET_MODE (target
));
9552 temp
= adjust_address_nv (target
, imode
,
9553 GET_MODE_SIZE (imode
));
9554 if (reg_overlap_mentioned_p (temp
, op0
))
9555 goto complex_expr_force_op1
;
9556 goto complex_expr_swap_order
;
9560 if (reg_overlap_mentioned_p (target
, op1
))
9562 if (reg_overlap_mentioned_p (target
, op0
))
9563 goto complex_expr_force_op1
;
9564 goto complex_expr_swap_order
;
9569 /* Move the real (op0) and imaginary (op1) parts to their location. */
9570 write_complex_part (target
, op0
, false);
9571 write_complex_part (target
, op1
, true);
9575 case WIDEN_SUM_EXPR
:
9577 tree oprnd0
= treeop0
;
9578 tree oprnd1
= treeop1
;
9580 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9581 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9586 case VEC_UNPACK_HI_EXPR
:
9587 case VEC_UNPACK_LO_EXPR
:
9588 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
9589 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
9591 op0
= expand_normal (treeop0
);
9592 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9598 case VEC_UNPACK_FLOAT_HI_EXPR
:
9599 case VEC_UNPACK_FLOAT_LO_EXPR
:
9601 op0
= expand_normal (treeop0
);
      /* The signedness is determined from the input operand.  */
9603 temp
= expand_widen_pattern_expr
9604 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9605 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9611 case VEC_WIDEN_MULT_HI_EXPR
:
9612 case VEC_WIDEN_MULT_LO_EXPR
:
9613 case VEC_WIDEN_MULT_EVEN_EXPR
:
9614 case VEC_WIDEN_MULT_ODD_EXPR
:
9615 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9616 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9617 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9618 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9620 gcc_assert (target
);
9623 case VEC_PACK_SAT_EXPR
:
9624 case VEC_PACK_FIX_TRUNC_EXPR
:
9625 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9628 case VEC_PACK_TRUNC_EXPR
:
9629 if (VECTOR_BOOLEAN_TYPE_P (type
)
9630 && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0
))
9631 && mode
== TYPE_MODE (TREE_TYPE (treeop0
))
9632 && SCALAR_INT_MODE_P (mode
))
9634 class expand_operand eops
[4];
9635 machine_mode imode
= TYPE_MODE (TREE_TYPE (treeop0
));
9636 expand_operands (treeop0
, treeop1
,
9637 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9638 this_optab
= vec_pack_sbool_trunc_optab
;
9639 enum insn_code icode
= optab_handler (this_optab
, imode
);
9640 create_output_operand (&eops
[0], target
, mode
);
9641 create_convert_operand_from (&eops
[1], op0
, imode
, false);
9642 create_convert_operand_from (&eops
[2], op1
, imode
, false);
9643 temp
= GEN_INT (TYPE_VECTOR_SUBPARTS (type
).to_constant ());
9644 create_input_operand (&eops
[3], temp
, imode
);
9645 expand_insn (icode
, 4, eops
);
9646 return eops
[0].value
;
9648 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9651 case VEC_PACK_FLOAT_EXPR
:
9652 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9653 expand_operands (treeop0
, treeop1
,
9654 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9655 this_optab
= optab_for_tree_code (code
, TREE_TYPE (treeop0
),
9657 target
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9658 TYPE_UNSIGNED (TREE_TYPE (treeop0
)),
9660 gcc_assert (target
);
9665 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9666 vec_perm_builder sel
;
9667 if (TREE_CODE (treeop2
) == VECTOR_CST
9668 && tree_to_vec_perm_builder (&sel
, treeop2
))
9670 machine_mode sel_mode
= TYPE_MODE (TREE_TYPE (treeop2
));
9671 temp
= expand_vec_perm_const (mode
, op0
, op1
, sel
,
9676 op2
= expand_normal (treeop2
);
9677 temp
= expand_vec_perm_var (mode
, op0
, op1
, op2
, target
);
9685 tree oprnd0
= treeop0
;
9686 tree oprnd1
= treeop1
;
9687 tree oprnd2
= treeop2
;
9690 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9691 op2
= expand_normal (oprnd2
);
9692 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9699 tree oprnd0
= treeop0
;
9700 tree oprnd1
= treeop1
;
9701 tree oprnd2
= treeop2
;
9704 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9705 op2
= expand_normal (oprnd2
);
9706 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9711 case REALIGN_LOAD_EXPR
:
9713 tree oprnd0
= treeop0
;
9714 tree oprnd1
= treeop1
;
9715 tree oprnd2
= treeop2
;
9718 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9719 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9720 op2
= expand_normal (oprnd2
);
9721 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9729 /* A COND_EXPR with its type being VOID_TYPE represents a
9730 conditional jump and is handled in
9731 expand_gimple_cond_expr. */
9732 gcc_assert (!VOID_TYPE_P (type
));
9734 /* Note that COND_EXPRs whose type is a structure or union
9735 are required to be constructed to contain assignments of
9736 a temporary variable, so that we can evaluate them here
9737 for side effect only. If type is void, we must do likewise. */
9739 gcc_assert (!TREE_ADDRESSABLE (type
)
9741 && TREE_TYPE (treeop1
) != void_type_node
9742 && TREE_TYPE (treeop2
) != void_type_node
);
9744 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9748 /* If we are not to produce a result, we have no target. Otherwise,
9749 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */
9753 if (modifier
!= EXPAND_STACK_PARM
9755 && safe_from_p (original_target
, treeop0
, 1)
9756 && GET_MODE (original_target
) == mode
9757 && !MEM_P (original_target
))
9758 temp
= original_target
;
9760 temp
= assign_temp (type
, 0, 1);
9762 do_pending_stack_adjust ();
9764 rtx_code_label
*lab0
= gen_label_rtx ();
9765 rtx_code_label
*lab1
= gen_label_rtx ();
9766 jumpifnot (treeop0
, lab0
,
9767 profile_probability::uninitialized ());
9768 store_expr (treeop1
, temp
,
9769 modifier
== EXPAND_STACK_PARM
,
9772 emit_jump_insn (targetm
.gen_jump (lab1
));
9775 store_expr (treeop2
, temp
,
9776 modifier
== EXPAND_STACK_PARM
,
9785 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9788 case VEC_DUPLICATE_EXPR
:
9789 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
9790 target
= expand_vector_broadcast (mode
, op0
);
9791 gcc_assert (target
);
9794 case VEC_SERIES_EXPR
:
9795 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, modifier
);
9796 return expand_vec_series_expr (mode
, op0
, op1
, target
);
9798 case BIT_INSERT_EXPR
:
9800 unsigned bitpos
= tree_to_uhwi (treeop2
);
9802 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1
)))
9803 bitsize
= TYPE_PRECISION (TREE_TYPE (treeop1
));
9805 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1
)));
9806 rtx op0
= expand_normal (treeop0
);
9807 rtx op1
= expand_normal (treeop1
);
9808 rtx dst
= gen_reg_rtx (mode
);
9809 emit_move_insn (dst
, op0
);
9810 store_bit_field (dst
, bitsize
, bitpos
, 0, 0,
9811 TYPE_MODE (TREE_TYPE (treeop1
)), op1
, false);
9819 /* Here to do an ordinary binary operator. */
9821 expand_operands (treeop0
, treeop1
,
9822 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9824 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9826 if (modifier
== EXPAND_STACK_PARM
)
9828 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9829 unsignedp
, OPTAB_LIB_WIDEN
);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated.  */
9833 if (code
== BIT_XOR_EXPR
9834 || code
== BIT_AND_EXPR
9835 || code
== BIT_IOR_EXPR
)
9837 return REDUCE_BIT_FIELD (temp
);
9839 #undef REDUCE_BIT_FIELD
9842 /* Return TRUE if expression STMT is suitable for replacement.
9843 Never consider memory loads as replaceable, because those don't ever lead
9844 into constant expressions. */
9847 stmt_is_replaceable_p (gimple
*stmt
)
9849 if (ssa_is_replaceable_p (stmt
))
9851 /* Don't move around loads. */
9852 if (!gimple_assign_single_p (stmt
)
9853 || is_gimple_val (gimple_assign_rhs1 (stmt
)))
9860 expand_expr_real_1 (tree exp
, rtx target
, machine_mode tmode
,
9861 enum expand_modifier modifier
, rtx
*alt_rtl
,
9862 bool inner_reference_p
)
9864 rtx op0
, op1
, temp
, decl_rtl
;
9867 machine_mode mode
, dmode
;
9868 enum tree_code code
= TREE_CODE (exp
);
9869 rtx subtarget
, original_target
;
9872 bool reduce_bit_field
;
9873 location_t loc
= EXPR_LOCATION (exp
);
9874 struct separate_ops ops
;
9875 tree treeop0
, treeop1
, treeop2
;
9876 tree ssa_name
= NULL_TREE
;
9879 type
= TREE_TYPE (exp
);
9880 mode
= TYPE_MODE (type
);
9881 unsignedp
= TYPE_UNSIGNED (type
);
9883 treeop0
= treeop1
= treeop2
= NULL_TREE
;
9884 if (!VL_EXP_CLASS_P (exp
))
9885 switch (TREE_CODE_LENGTH (code
))
9888 case 3: treeop2
= TREE_OPERAND (exp
, 2); /* FALLTHRU */
9889 case 2: treeop1
= TREE_OPERAND (exp
, 1); /* FALLTHRU */
9890 case 1: treeop0
= TREE_OPERAND (exp
, 0); /* FALLTHRU */
9900 ignore
= (target
== const0_rtx
9901 || ((CONVERT_EXPR_CODE_P (code
)
9902 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9903 && TREE_CODE (type
) == VOID_TYPE
));
9905 /* An operation in what may be a bit-field type needs the
9906 result to be reduced to the precision of the bit-field type,
9907 which is narrower than that of the type's mode. */
9908 reduce_bit_field
= (!ignore
9909 && INTEGRAL_TYPE_P (type
)
9910 && !type_has_mode_precision_p (type
));
9912 /* If we are going to ignore this result, we need only do something
9913 if there is a side-effect somewhere in the expression. If there
9914 is, short-circuit the most common cases here. Note that we must
9915 not call expand_expr with anything but const0_rtx in case this
9916 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9920 if (! TREE_SIDE_EFFECTS (exp
))
9923 /* Ensure we reference a volatile object even if value is ignored, but
9924 don't do this if all we are doing is taking its address. */
9925 if (TREE_THIS_VOLATILE (exp
)
9926 && TREE_CODE (exp
) != FUNCTION_DECL
9927 && mode
!= VOIDmode
&& mode
!= BLKmode
9928 && modifier
!= EXPAND_CONST_ADDRESS
)
9930 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9936 if (TREE_CODE_CLASS (code
) == tcc_unary
9937 || code
== BIT_FIELD_REF
9938 || code
== COMPONENT_REF
9939 || code
== INDIRECT_REF
)
9940 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9943 else if (TREE_CODE_CLASS (code
) == tcc_binary
9944 || TREE_CODE_CLASS (code
) == tcc_comparison
9945 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9947 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9948 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9955 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9958 /* Use subtarget as the target for operand 0 of a binary operation. */
9959 subtarget
= get_subtarget (target
);
9960 original_target
= target
;
9966 tree function
= decl_function_context (exp
);
9968 temp
= label_rtx (exp
);
9969 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9971 if (function
!= current_function_decl
9973 LABEL_REF_NONLOCAL_P (temp
) = 1;
9975 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
         out-of-ssa.  So fake instructions as if this was an access to the
         base variable.  This unnecessarily allocates a pseudo, see how we can
         reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
        {
          tree var = SSA_NAME_VAR (exp);
          if (var && DECL_RTL_SET_P (var))
            return DECL_RTL (var);
          return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
                              LAST_VIRTUAL_REGISTER + 1);
        }
      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
          && modifier == EXPAND_INITIALIZER
          && !SSA_NAME_IS_DEFAULT_DEF (exp)
          && (optimize || !SSA_NAME_VAR (exp)
              || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
          && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
        g = SSA_NAME_DEF_STMT (exp);
      if (g)
        {
          rtx r;
          location_t saved_loc = curr_insn_location ();
          location_t loc = gimple_location (g);
          if (loc != UNKNOWN_LOCATION)
            set_curr_insn_location (loc);
          ops.code = gimple_assign_rhs_code (g);
          switch (get_gimple_rhs_class (ops.code))
            {
            case GIMPLE_TERNARY_RHS:
              ops.op2 = gimple_assign_rhs3 (g);
              /* FALLTHRU */
            case GIMPLE_BINARY_RHS:
              ops.op1 = gimple_assign_rhs2 (g);

              /* Try to expand conditional compare.  */
              if (targetm.gen_ccmp_first)
                {
                  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
                  r = expand_ccmp_expr (g, mode);
                  if (r)
                    break;
                }
              /* FALLTHRU */
            case GIMPLE_UNARY_RHS:
              ops.op0 = gimple_assign_rhs1 (g);
              ops.type = TREE_TYPE (gimple_assign_lhs (g));
              ops.location = loc;
              r = expand_expr_real_2 (&ops, target, tmode, modifier);
              break;
            case GIMPLE_SINGLE_RHS:
              {
                r = expand_expr_real (gimple_assign_rhs1 (g), target,
                                      tmode, modifier, alt_rtl,
                                      inner_reference_p);
                break;
              }
            default:
              gcc_unreachable ();
            }
          set_curr_insn_location (saved_loc);
          if (REG_P (r) && !REG_EXPR (r))
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
          return r;
        }

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* fall through */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      /* DECL_MODE might change when TYPE_MODE depends on attribute target
         settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
          && code == VAR_DECL && MEM_P (decl_rtl)
          && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
        decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
        decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
          && REG_P (decl_rtl)
          && HARD_REGISTER_P (decl_rtl))
        add_to_hard_reg_set (&crtl->asm_clobbers,
                             GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (exp)
        TREE_USED (exp) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
         been lowered by this point.  */
      if (exp)
        context = decl_function_context (exp);
      gcc_assert (!exp
                  || SCOPE_FILE_SCOPE_P (context)
                  || context == current_function_decl
                  || TREE_STATIC (exp)
                  || DECL_EXTERNAL (exp)
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
                  || TREE_CODE (exp) == FUNCTION_DECL);
      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
        temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
         address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
        {
          if (alt_rtl)
            *alt_rtl = decl_rtl;
          decl_rtl = use_anchored_address (decl_rtl);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM
              && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
                                               : GET_MODE (decl_rtl),
                                               XEXP (decl_rtl, 0),
                                               MEM_ADDR_SPACE (decl_rtl)))
            temp = replace_equiv_address (decl_rtl,
                                          copy_rtx (XEXP (decl_rtl, 0)));
        }
      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
        }
      else if (MEM_P (decl_rtl))
        temp = decl_rtl;

      if (temp != 0)
        {
          if (MEM_P (temp)
              && modifier != EXPAND_WRITE
              && modifier != EXPAND_MEMORY
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM
              && !inner_reference_p
              && mode != BLKmode
              && MEM_ALIGN (temp) < GET_MODE_ALIGNMENT (mode))
            temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
                                              MEM_ALIGN (temp), NULL_RTX,
                                              NULL);

          return temp;
        }
      if (exp)
        dmode = DECL_MODE (exp);
      else
        dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
         there are two cases: we are dealing with a BLKmode value
         that is returned in a register, or we are dealing with
         a promoted value.  In the latter case, return a SUBREG
         of the wanted mode, but mark it so that we know that it
         was already extended.  */
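      /* Illustrative case (not from the original sources): on a target
         that promotes subword values, a "short" variable may live in an
         SImode hard register; reading it here yields
         (subreg:HI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P set, so
         later users know the value is already sign- or zero-extended.  */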
      if (REG_P (decl_rtl)
          && dmode != BLKmode
          && GET_MODE (decl_rtl) != dmode)
        {
          machine_mode pmode;

          /* Get the signedness to be used for this variable.  Ensure we get
             the same mode we got when the variable was declared.  */
          if (code != SSA_NAME)
            pmode = promote_decl_mode (exp, &unsignedp);
          else if ((g = SSA_NAME_DEF_STMT (ssa_name))
                   && gimple_code (g) == GIMPLE_CALL
                   && !gimple_call_internal_p (g))
            pmode = promote_function_mode (type, mode, &unsignedp,
                                           gimple_call_fntype (g),
                                           2);
          else
            pmode = promote_ssa_mode (ssa_name, &unsignedp);
          gcc_assert (GET_MODE (decl_rtl) == pmode);

          temp = gen_lowpart_SUBREG (mode, decl_rtl);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_SET (temp, unsignedp);
          return temp;
        }
      return decl_rtl;

    case INTEGER_CST:
      {
        /* Given that TYPE_PRECISION (type) is not always equal to
           GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
           the former to the latter according to the signedness of the
           type.  */
        scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
        temp = immed_wide_int_const
          (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
        return temp;
      }
    case VECTOR_CST:
      {
        tree tmp = NULL_TREE;
        if (VECTOR_MODE_P (mode))
          return const_vector_from_tree (exp);
        scalar_int_mode int_mode;
        if (is_int_mode (mode, &int_mode))
          {
            if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
              return const_scalar_mask_from_tree (int_mode, exp);
            else
              {
                tree type_for_mode
                  = lang_hooks.types.type_for_mode (int_mode, 1);
                if (type_for_mode)
                  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
                                        type_for_mode, exp);
              }
          }
        if (!tmp)
          {
            vec<constructor_elt, va_gc> *v;
            /* Constructors need to be fixed-length.  FIXME.  */
            unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
            vec_alloc (v, nunits);
            for (unsigned int i = 0; i < nunits; ++i)
              CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
            tmp = build_constructor (type, v);
          }
        return expand_expr (tmp, ignore ? const0_rtx : target,
                            tmode, modifier);
      }
    case CONST_DECL:
      if (modifier == EXPAND_WRITE)
        {
          /* Writing into CONST_DECL is always invalid, but handle it
             gracefully.  */
          addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
          scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
          op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
                                         EXPAND_NORMAL, as);
          op0 = memory_address_addr_space (mode, op0, as);
          temp = gen_rtx_MEM (mode, op0);
          set_mem_addr_space (temp, as);
          return temp;
        }
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return const_double_from_real_value (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* fall through */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);
      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
                                            MEM_ADDR_SPACE (temp)))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;

    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);
    case SAVE_EXPR:
      {
        tree val = treeop0;
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
                                      inner_reference_p);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (curr_insn_location (),
                              VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            treeop0 = val;
            TREE_OPERAND (exp, 0) = treeop0;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          unsigned HOST_WIDE_INT idx;
          tree value;

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

          return const0_rtx;
        }

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        unsigned int align;

        op0 = addr_for_mem_ref (exp, as, true);
        op0 = memory_address_addr_space (mode, op0, as);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        align = get_object_alignment (exp);
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode))
          temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
                                            align, NULL_RTX, NULL);
        return temp;
      }
10390 const bool reverse
= REF_REVERSE_STORAGE_ORDER (exp
);
10392 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
10393 machine_mode address_mode
;
10394 tree base
= TREE_OPERAND (exp
, 0);
10397 /* Handle expansion of non-aliased memory with non-BLKmode. That
10398 might end up in a register. */
10399 if (mem_ref_refers_to_non_mem_p (exp
))
10401 poly_int64 offset
= mem_ref_offset (exp
).force_shwi ();
10402 base
= TREE_OPERAND (base
, 0);
10403 poly_uint64 type_size
;
10404 if (known_eq (offset
, 0)
10406 && poly_int_tree_p (TYPE_SIZE (type
), &type_size
)
10407 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base
)), type_size
))
10408 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
10409 target
, tmode
, modifier
);
10410 if (TYPE_MODE (type
) == BLKmode
)
10412 temp
= assign_stack_temp (DECL_MODE (base
),
10413 GET_MODE_SIZE (DECL_MODE (base
)));
10414 store_expr (base
, temp
, 0, false, false);
10415 temp
= adjust_address (temp
, BLKmode
, offset
);
10416 set_mem_size (temp
, int_size_in_bytes (type
));
10419 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
10420 bitsize_int (offset
* BITS_PER_UNIT
));
10421 REF_REVERSE_STORAGE_ORDER (exp
) = reverse
;
10422 return expand_expr (exp
, target
, tmode
, modifier
);
10424 address_mode
= targetm
.addr_space
.address_mode (as
);
10425 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
10427 tree mask
= gimple_assign_rhs2 (def_stmt
);
10428 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
10429 gimple_assign_rhs1 (def_stmt
), mask
);
10430 TREE_OPERAND (exp
, 0) = base
;
10432 align
= get_object_alignment (exp
);
10433 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
10434 op0
= memory_address_addr_space (mode
, op0
, as
);
10435 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
10437 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
10438 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
10439 op0
= memory_address_addr_space (mode
, op0
, as
);
10441 temp
= gen_rtx_MEM (mode
, op0
);
10442 set_mem_attributes (temp
, exp
, 0);
10443 set_mem_addr_space (temp
, as
);
10444 if (TREE_THIS_VOLATILE (exp
))
10445 MEM_VOLATILE_P (temp
) = 1;
10446 if (modifier
!= EXPAND_WRITE
10447 && modifier
!= EXPAND_MEMORY
10448 && !inner_reference_p
10450 && align
< GET_MODE_ALIGNMENT (mode
))
10451 temp
= expand_misaligned_mem_ref (temp
, mode
, unsignedp
, align
,
10452 modifier
== EXPAND_STACK_PARM
10453 ? NULL_RTX
: target
, alt_rtl
);
10455 && modifier
!= EXPAND_MEMORY
10456 && modifier
!= EXPAND_WRITE
)
10457 temp
= flip_storage_order (mode
, temp
);
10464 tree array
= treeop0
;
10465 tree index
= treeop1
;
10468 /* Fold an expression like: "foo"[2].
10469 This is not done in fold so it won't happen inside &.
10470 Don't fold if this is for wide characters since it's too
10471 difficult to do correctly and this is a very rare case. */
10473 if (modifier
!= EXPAND_CONST_ADDRESS
10474 && modifier
!= EXPAND_INITIALIZER
10475 && modifier
!= EXPAND_MEMORY
)
10477 tree t
= fold_read_from_constant_string (exp
);
10480 return expand_expr (t
, target
, tmode
, modifier
);
10483 /* If this is a constant index into a constant array,
10484 just get the value from the array. Handle both the cases when
10485 we have an explicit constructor and when our operand is a variable
10486 that was declared const. */
10488 if (modifier
!= EXPAND_CONST_ADDRESS
10489 && modifier
!= EXPAND_INITIALIZER
10490 && modifier
!= EXPAND_MEMORY
10491 && TREE_CODE (array
) == CONSTRUCTOR
10492 && ! TREE_SIDE_EFFECTS (array
)
10493 && TREE_CODE (index
) == INTEGER_CST
)
10495 unsigned HOST_WIDE_INT ix
;
10498 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
10500 if (tree_int_cst_equal (field
, index
))
10502 if (!TREE_SIDE_EFFECTS (value
))
10503 return expand_expr (fold (value
), target
, tmode
, modifier
);
10508 else if (optimize
>= 1
10509 && modifier
!= EXPAND_CONST_ADDRESS
10510 && modifier
!= EXPAND_INITIALIZER
10511 && modifier
!= EXPAND_MEMORY
10512 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
10513 && TREE_CODE (index
) == INTEGER_CST
10514 && (VAR_P (array
) || TREE_CODE (array
) == CONST_DECL
)
10515 && (init
= ctor_for_folding (array
)) != error_mark_node
)
10517 if (init
== NULL_TREE
)
10519 tree value
= build_zero_cst (type
);
10520 if (TREE_CODE (value
) == CONSTRUCTOR
)
10522 /* If VALUE is a CONSTRUCTOR, this optimization is only
10523 useful if this doesn't store the CONSTRUCTOR into
10524 memory. If it does, it is more efficient to just
10525 load the data from the array directly. */
10526 rtx ret
= expand_constructor (value
, target
,
10528 if (ret
== NULL_RTX
)
10533 return expand_expr (value
, target
, tmode
, modifier
);
10535 else if (TREE_CODE (init
) == CONSTRUCTOR
)
10537 unsigned HOST_WIDE_INT ix
;
10540 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
10542 if (tree_int_cst_equal (field
, index
))
10544 if (TREE_SIDE_EFFECTS (value
))
10547 if (TREE_CODE (value
) == CONSTRUCTOR
)
10549 /* If VALUE is a CONSTRUCTOR, this
10550 optimization is only useful if
10551 this doesn't store the CONSTRUCTOR
10552 into memory. If it does, it is more
10553 efficient to just load the data from
10554 the array directly. */
10555 rtx ret
= expand_constructor (value
, target
,
10557 if (ret
== NULL_RTX
)
10562 expand_expr (fold (value
), target
, tmode
, modifier
);
10565 else if (TREE_CODE (init
) == STRING_CST
)
10567 tree low_bound
= array_ref_low_bound (exp
);
10568 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
              /* Optimize the special case of a zero lower bound.

                 We convert the lower bound to sizetype to avoid problems
                 with constant folding.  E.g. suppose the lower bound is
                 1 and its mode is QI.  Without the conversion
                    (ARRAY + (INDEX - (unsigned char)1))
                 becomes
                    (ARRAY + (-(unsigned char)1) + INDEX)
                 which becomes
                    (ARRAY + 255 + INDEX).  Oops!  */
10580 if (!integer_zerop (low_bound
))
10581 index1
= size_diffop_loc (loc
, index1
,
10582 fold_convert_loc (loc
, sizetype
,
10585 if (tree_fits_uhwi_p (index1
)
10586 && compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
10588 tree type
= TREE_TYPE (TREE_TYPE (init
));
10589 scalar_int_mode mode
;
10591 if (is_int_mode (TYPE_MODE (type
), &mode
)
10592 && GET_MODE_SIZE (mode
) == 1)
10593 return gen_int_mode (TREE_STRING_POINTER (init
)
10594 [TREE_INT_CST_LOW (index1
)],
10600 goto normal_inner_ref
;
10602 case COMPONENT_REF
:
10603 /* If the operand is a CONSTRUCTOR, we can just extract the
10604 appropriate field if it is present. */
10605 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
10607 unsigned HOST_WIDE_INT idx
;
10609 scalar_int_mode field_mode
;
10611 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
10613 if (field
== treeop1
10614 /* We can normally use the value of the field in the
10615 CONSTRUCTOR. However, if this is a bitfield in
10616 an integral mode that we can fit in a HOST_WIDE_INT,
10617 we must mask only the number of bits in the bitfield,
10618 since this is done implicitly by the constructor. If
10619 the bitfield does not meet either of those conditions,
10620 we can't do this optimization. */
10621 && (! DECL_BIT_FIELD (field
)
10622 || (is_int_mode (DECL_MODE (field
), &field_mode
)
10623 && (GET_MODE_PRECISION (field_mode
)
10624 <= HOST_BITS_PER_WIDE_INT
))))
10626 if (DECL_BIT_FIELD (field
)
10627 && modifier
== EXPAND_STACK_PARM
)
10629 op0
= expand_expr (value
, target
, tmode
, modifier
);
10630 if (DECL_BIT_FIELD (field
))
10632 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10633 scalar_int_mode imode
10634 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field
));
10636 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10638 op1
= gen_int_mode ((HOST_WIDE_INT_1
<< bitsize
) - 1,
10640 op0
= expand_and (imode
, op0
, op1
, target
);
10644 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10646 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10648 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10656 goto normal_inner_ref
;
10658 case BIT_FIELD_REF
:
10659 case ARRAY_RANGE_REF
:
10662 machine_mode mode1
, mode2
;
10663 poly_int64 bitsize
, bitpos
, bytepos
;
10665 int reversep
, volatilep
= 0, must_force_mem
;
10667 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
10668 &unsignedp
, &reversep
, &volatilep
);
10669 rtx orig_op0
, memloc
;
10670 bool clear_mem_expr
= false;
10672 /* If we got back the original object, something is wrong. Perhaps
10673 we are evaluating an expression too early. In any event, don't
10674 infinitely recurse. */
10675 gcc_assert (tem
!= exp
);
10677 /* If TEM's type is a union of variable size, pass TARGET to the inner
10678 computation, since it will need a temporary and TARGET is known
10679 to have to do. This occurs in unchecked conversion in Ada. */
10681 = expand_expr_real (tem
,
10682 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10683 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10684 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10686 && modifier
!= EXPAND_STACK_PARM
10687 ? target
: NULL_RTX
),
10689 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10692 /* If the field has a mode, we want to access it in the
10693 field's mode, not the computed mode.
10694 If a MEM has VOIDmode (external with incomplete type),
10695 use BLKmode for it instead. */
10698 if (mode1
!= VOIDmode
)
10699 op0
= adjust_address (op0
, mode1
, 0);
10700 else if (GET_MODE (op0
) == VOIDmode
)
10701 op0
= adjust_address (op0
, BLKmode
, 0);
10705 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10707 /* Make sure bitpos is not negative, it can wreak havoc later. */
10708 if (maybe_lt (bitpos
, 0))
10710 gcc_checking_assert (offset
== NULL_TREE
);
10711 offset
= size_int (bits_to_bytes_round_down (bitpos
));
10712 bitpos
= num_trailing_bits (bitpos
);
10715 /* If we have either an offset, a BLKmode result, or a reference
10716 outside the underlying object, we must force it to memory.
10717 Such a case can occur in Ada if we have unchecked conversion
10718 of an expression from a scalar type to an aggregate type or
10719 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10720 passed a partially uninitialized object or a view-conversion
10721 to a larger size. */
10722 must_force_mem
= (offset
10723 || mode1
== BLKmode
10724 || (mode
== BLKmode
10725 && !int_mode_for_size (bitsize
, 1).exists ())
10726 || maybe_gt (bitpos
+ bitsize
,
10727 GET_MODE_BITSIZE (mode2
)));
10729 /* Handle CONCAT first. */
10730 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10732 if (known_eq (bitpos
, 0)
10733 && known_eq (bitsize
, GET_MODE_BITSIZE (GET_MODE (op0
)))
10734 && COMPLEX_MODE_P (mode1
)
10735 && COMPLEX_MODE_P (GET_MODE (op0
))
10736 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1
))
10737 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0
)))))
10740 op0
= flip_storage_order (GET_MODE (op0
), op0
);
10741 if (mode1
!= GET_MODE (op0
))
10744 for (int i
= 0; i
< 2; i
++)
10746 rtx op
= read_complex_part (op0
, i
!= 0);
10747 if (GET_CODE (op
) == SUBREG
)
10748 op
= force_reg (GET_MODE (op
), op
);
10749 rtx temp
= gen_lowpart_common (GET_MODE_INNER (mode1
),
10755 if (!REG_P (op
) && !MEM_P (op
))
10756 op
= force_reg (GET_MODE (op
), op
);
10757 op
= gen_lowpart (GET_MODE_INNER (mode1
), op
);
10761 op0
= gen_rtx_CONCAT (mode1
, parts
[0], parts
[1]);
10765 if (known_eq (bitpos
, 0)
10766 && known_eq (bitsize
,
10767 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))))
10768 && maybe_ne (bitsize
, 0))
10770 op0
= XEXP (op0
, 0);
10771 mode2
= GET_MODE (op0
);
10773 else if (known_eq (bitpos
,
10774 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))))
10775 && known_eq (bitsize
,
10776 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1))))
10777 && maybe_ne (bitpos
, 0)
10778 && maybe_ne (bitsize
, 0))
10780 op0
= XEXP (op0
, 1);
10782 mode2
= GET_MODE (op0
);
10785 /* Otherwise force into memory. */
10786 must_force_mem
= 1;
10789 /* If this is a constant, put it in a register if it is a legitimate
10790 constant and we don't need a memory reference. */
10791 if (CONSTANT_P (op0
)
10792 && mode2
!= BLKmode
10793 && targetm
.legitimate_constant_p (mode2
, op0
)
10794 && !must_force_mem
)
10795 op0
= force_reg (mode2
, op0
);
10797 /* Otherwise, if this is a constant, try to force it to the constant
10798 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10799 is a legitimate constant. */
10800 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10801 op0
= validize_mem (memloc
);
10803 /* Otherwise, if this is a constant or the object is not in memory
10804 and need be, put it there. */
10805 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10807 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10808 emit_move_insn (memloc
, op0
);
10810 clear_mem_expr
= true;
10815 machine_mode address_mode
;
10816 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
10819 gcc_assert (MEM_P (op0
));
10821 address_mode
= get_address_mode (op0
);
10822 if (GET_MODE (offset_rtx
) != address_mode
)
10824 /* We cannot be sure that the RTL in offset_rtx is valid outside
10825 of a memory address context, so force it into a register
10826 before attempting to convert it to the desired mode. */
10827 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
10828 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
10831 /* See the comment in expand_assignment for the rationale. */
10832 if (mode1
!= VOIDmode
10833 && maybe_ne (bitpos
, 0)
10834 && maybe_gt (bitsize
, 0)
10835 && multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
10836 && multiple_p (bitpos
, bitsize
)
10837 && multiple_p (bitsize
, GET_MODE_ALIGNMENT (mode1
))
10838 && MEM_ALIGN (op0
) >= GET_MODE_ALIGNMENT (mode1
))
10840 op0
= adjust_address (op0
, mode1
, bytepos
);
10844 op0
= offset_address (op0
, offset_rtx
,
10845 highest_pow2_factor (offset
));
10848 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10849 record its alignment as BIGGEST_ALIGNMENT. */
10851 && known_eq (bitpos
, 0)
10853 && is_aligning_offset (offset
, tem
))
10854 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
10856 /* Don't forget about volatility even if this is a bitfield. */
10857 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
10859 if (op0
== orig_op0
)
10860 op0
= copy_rtx (op0
);
10862 MEM_VOLATILE_P (op0
) = 1;
10865 if (MEM_P (op0
) && TREE_CODE (tem
) == FUNCTION_DECL
)
10867 if (op0
== orig_op0
)
10868 op0
= copy_rtx (op0
);
10870 set_mem_align (op0
, BITS_PER_UNIT
);
10873 /* In cases where an aligned union has an unaligned object
10874 as a field, we might be extracting a BLKmode value from
10875 an integer-mode (e.g., SImode) object. Handle this case
10876 by doing the extract into an object as wide as the field
10877 (which we know to be the width of a basic mode), then
10878 storing into memory, and changing the mode to BLKmode. */
10879 if (mode1
== VOIDmode
10880 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
10881 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
10882 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
10883 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
10884 && modifier
!= EXPAND_CONST_ADDRESS
10885 && modifier
!= EXPAND_INITIALIZER
10886 && modifier
!= EXPAND_MEMORY
)
10887 /* If the bitfield is volatile and the bitsize
10888 is narrower than the access size of the bitfield,
10889 we need to extract bitfields from the access. */
10890 || (volatilep
&& TREE_CODE (exp
) == COMPONENT_REF
10891 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp
, 1))
10892 && mode1
!= BLKmode
10893 && maybe_lt (bitsize
, GET_MODE_SIZE (mode1
) * BITS_PER_UNIT
))
10894 /* If the field isn't aligned enough to fetch as a memref,
10895 fetch it as a bit field. */
10896 || (mode1
!= BLKmode
10898 ? MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
10899 || !multiple_p (bitpos
, GET_MODE_ALIGNMENT (mode1
))
10900 : TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
10901 || !multiple_p (bitpos
, GET_MODE_ALIGNMENT (mode
)))
10902 && modifier
!= EXPAND_MEMORY
10903 && ((modifier
== EXPAND_CONST_ADDRESS
10904 || modifier
== EXPAND_INITIALIZER
)
10906 : targetm
.slow_unaligned_access (mode1
,
10908 || !multiple_p (bitpos
, BITS_PER_UNIT
)))
10909 /* If the type and the field are a constant size and the
10910 size of the type isn't the same size as the bitfield,
10911 we must use bitfield operations. */
10912 || (known_size_p (bitsize
)
10913 && TYPE_SIZE (TREE_TYPE (exp
))
10914 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp
)))
10915 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp
))),
10918 machine_mode ext_mode
= mode
;
10920 if (ext_mode
== BLKmode
10921 && ! (target
!= 0 && MEM_P (op0
)
10923 && multiple_p (bitpos
, BITS_PER_UNIT
)))
10924 ext_mode
= int_mode_for_size (bitsize
, 1).else_blk ();
10926 if (ext_mode
== BLKmode
)
10929 target
= assign_temp (type
, 1, 1);
10931 /* ??? Unlike the similar test a few lines below, this one is
10932 very likely obsolete. */
10933 if (known_eq (bitsize
, 0))
10936 /* In this case, BITPOS must start at a byte boundary and
10937 TARGET, if specified, must be a MEM. */
10938 gcc_assert (MEM_P (op0
)
10939 && (!target
|| MEM_P (target
)));
10941 bytepos
= exact_div (bitpos
, BITS_PER_UNIT
);
10942 poly_int64 bytesize
= bits_to_bytes_round_up (bitsize
);
10943 emit_block_move (target
,
10944 adjust_address (op0
, VOIDmode
, bytepos
),
10945 gen_int_mode (bytesize
, Pmode
),
10946 (modifier
== EXPAND_STACK_PARM
10947 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
10952 /* If we have nothing to extract, the result will be 0 for targets
10953 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10954 return 0 for the sake of consistency, as reading a zero-sized
10955 bitfield is valid in Ada and the value is fully specified. */
10956 if (known_eq (bitsize
, 0))
10959 op0
= validize_mem (op0
);
10961 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
10962 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10964 /* If the result has aggregate type and the extraction is done in
10965 an integral mode, then the field may be not aligned on a byte
10966 boundary; in this case, if it has reverse storage order, it
10967 needs to be extracted as a scalar field with reverse storage
10968 order and put back into memory order afterwards. */
10969 if (AGGREGATE_TYPE_P (type
)
10970 && GET_MODE_CLASS (ext_mode
) == MODE_INT
)
10971 reversep
= TYPE_REVERSE_STORAGE_ORDER (type
);
10973 gcc_checking_assert (known_ge (bitpos
, 0));
10974 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
10975 (modifier
== EXPAND_STACK_PARM
10976 ? NULL_RTX
: target
),
10977 ext_mode
, ext_mode
, reversep
, alt_rtl
);
10979 /* If the result has aggregate type and the mode of OP0 is an
10980 integral mode then, if BITSIZE is narrower than this mode
10981 and this is for big-endian data, we must put the field
10982 into the high-order bits. And we must also put it back
10983 into memory order if it has been previously reversed. */
10984 scalar_int_mode op0_mode
;
10985 if (AGGREGATE_TYPE_P (type
)
10986 && is_int_mode (GET_MODE (op0
), &op0_mode
))
10988 HOST_WIDE_INT size
= GET_MODE_BITSIZE (op0_mode
);
10990 gcc_checking_assert (known_le (bitsize
, size
));
10991 if (maybe_lt (bitsize
, size
)
10992 && reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
10993 op0
= expand_shift (LSHIFT_EXPR
, op0_mode
, op0
,
10994 size
- bitsize
, op0
, 1);
10997 op0
= flip_storage_order (op0_mode
, op0
);
11000 /* If the result type is BLKmode, store the data into a temporary
11001 of the appropriate type, but with the mode corresponding to the
11002 mode for the data we have (op0's mode). */
11003 if (mode
== BLKmode
)
11006 = assign_stack_temp_for_type (ext_mode
,
11007 GET_MODE_BITSIZE (ext_mode
),
11009 emit_move_insn (new_rtx
, op0
);
11010 op0
= copy_rtx (new_rtx
);
11011 PUT_MODE (op0
, BLKmode
);
11017 /* If the result is BLKmode, use that to access the object
11019 if (mode
== BLKmode
)
11022 /* Get a reference to just this component. */
11023 bytepos
= bits_to_bytes_round_down (bitpos
);
11024 if (modifier
== EXPAND_CONST_ADDRESS
11025 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
11026 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
11028 op0
= adjust_address (op0
, mode1
, bytepos
);
11030 if (op0
== orig_op0
)
11031 op0
= copy_rtx (op0
);
11033 /* Don't set memory attributes if the base expression is
11034 SSA_NAME that got expanded as a MEM or a CONSTANT. In that case,
11035 we should just honor its original memory attributes. */
11036 if (!(TREE_CODE (tem
) == SSA_NAME
11037 && (MEM_P (orig_op0
) || CONSTANT_P (orig_op0
))))
11038 set_mem_attributes (op0
, exp
, 0);
11040 if (REG_P (XEXP (op0
, 0)))
11041 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
11043 /* If op0 is a temporary because the original expressions was forced
11044 to memory, clear MEM_EXPR so that the original expression cannot
11045 be marked as addressable through MEM_EXPR of the temporary. */
11046 if (clear_mem_expr
)
11047 set_mem_expr (op0
, NULL_TREE
);
11049 MEM_VOLATILE_P (op0
) |= volatilep
;
11052 && modifier
!= EXPAND_MEMORY
11053 && modifier
!= EXPAND_WRITE
)
11054 op0
= flip_storage_order (mode1
, op0
);
11056 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
11057 || modifier
== EXPAND_CONST_ADDRESS
11058 || modifier
== EXPAND_INITIALIZER
)
11062 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
11064 convert_move (target
, op0
, unsignedp
);
11069 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
11072 /* All valid uses of __builtin_va_arg_pack () are removed during
11074 if (CALL_EXPR_VA_ARG_PACK (exp
))
11075 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
11077 tree fndecl
= get_callee_fndecl (exp
), attr
;
11080 /* Don't diagnose the error attribute in thunks, those are
11081 artificially created. */
11082 && !CALL_FROM_THUNK_P (exp
)
11083 && (attr
= lookup_attribute ("error",
11084 DECL_ATTRIBUTES (fndecl
))) != NULL
)
11086 const char *ident
= lang_hooks
.decl_printable_name (fndecl
, 1);
11087 error ("%Kcall to %qs declared with attribute error: %s", exp
,
11088 identifier_to_locale (ident
),
11089 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
11092 /* Don't diagnose the warning attribute in thunks, those are
11093 artificially created. */
11094 && !CALL_FROM_THUNK_P (exp
)
11095 && (attr
= lookup_attribute ("warning",
11096 DECL_ATTRIBUTES (fndecl
))) != NULL
)
11098 const char *ident
= lang_hooks
.decl_printable_name (fndecl
, 1);
11099 warning_at (tree_nonartificial_location (exp
),
11100 OPT_Wattribute_warning
,
11101 "%Kcall to %qs declared with attribute warning: %s",
11102 exp
, identifier_to_locale (ident
),
11103 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
11106 /* Check for a built-in function. */
11107 if (fndecl
&& fndecl_built_in_p (fndecl
))
11109 gcc_assert (DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_FRONTEND
);
11110 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
11113 return expand_call (exp
, target
, ignore
);
11115 case VIEW_CONVERT_EXPR
:
11118 /* If we are converting to BLKmode, try to avoid an intermediate
11119 temporary by fetching an inner memory reference. */
11120 if (mode
== BLKmode
11121 && poly_int_tree_p (TYPE_SIZE (type
))
11122 && TYPE_MODE (TREE_TYPE (treeop0
)) != BLKmode
11123 && handled_component_p (treeop0
))
11125 machine_mode mode1
;
11126 poly_int64 bitsize
, bitpos
, bytepos
;
11128 int reversep
, volatilep
= 0;
11130 = get_inner_reference (treeop0
, &bitsize
, &bitpos
, &offset
, &mode1
,
11131 &unsignedp
, &reversep
, &volatilep
);
11133 /* ??? We should work harder and deal with non-zero offsets. */
11135 && multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
11137 && known_size_p (bitsize
)
11138 && known_eq (wi::to_poly_offset (TYPE_SIZE (type
)), bitsize
))
11140 /* See the normal_inner_ref case for the rationale. */
11142 = expand_expr_real (tem
,
11143 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
11144 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
11146 && modifier
!= EXPAND_STACK_PARM
11147 ? target
: NULL_RTX
),
11149 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
11152 if (MEM_P (orig_op0
))
11156 /* Get a reference to just this component. */
11157 if (modifier
== EXPAND_CONST_ADDRESS
11158 || modifier
== EXPAND_SUM
11159 || modifier
== EXPAND_INITIALIZER
)
11160 op0
= adjust_address_nv (op0
, mode
, bytepos
);
11162 op0
= adjust_address (op0
, mode
, bytepos
);
11164 if (op0
== orig_op0
)
11165 op0
= copy_rtx (op0
);
11167 set_mem_attributes (op0
, treeop0
, 0);
11168 if (REG_P (XEXP (op0
, 0)))
11169 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
11171 MEM_VOLATILE_P (op0
) |= volatilep
;
11177 op0
= expand_expr_real (treeop0
, NULL_RTX
, VOIDmode
, modifier
,
11178 NULL
, inner_reference_p
);
11180 /* If the input and output modes are both the same, we are done. */
11181 if (mode
== GET_MODE (op0
))
11183 /* If neither mode is BLKmode, and both modes are the same size
11184 then we can use gen_lowpart. */
11185 else if (mode
!= BLKmode
11186 && GET_MODE (op0
) != BLKmode
11187 && known_eq (GET_MODE_PRECISION (mode
),
11188 GET_MODE_PRECISION (GET_MODE (op0
)))
11189 && !COMPLEX_MODE_P (GET_MODE (op0
)))
11191 if (GET_CODE (op0
) == SUBREG
)
11192 op0
= force_reg (GET_MODE (op0
), op0
);
11193 temp
= gen_lowpart_common (mode
, op0
);
11198 if (!REG_P (op0
) && !MEM_P (op0
))
11199 op0
= force_reg (GET_MODE (op0
), op0
);
11200 op0
= gen_lowpart (mode
, op0
);
11203 /* If both types are integral, convert from one mode to the other. */
11204 else if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0
)))
11205 op0
= convert_modes (mode
, GET_MODE (op0
), op0
,
11206 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
11207 /* If the output type is a bit-field type, do an extraction. */
11208 else if (reduce_bit_field
)
11209 return extract_bit_field (op0
, TYPE_PRECISION (type
), 0,
11210 TYPE_UNSIGNED (type
), NULL_RTX
,
11211 mode
, mode
, false, NULL
);
11212 /* As a last resort, spill op0 to memory, and reload it in a
11214 else if (!MEM_P (op0
))
11216 /* If the operand is not a MEM, force it into memory. Since we
11217 are going to be changing the mode of the MEM, don't call
11218 force_const_mem for constants because we don't allow pool
11219 constants to change mode. */
11220 tree inner_type
= TREE_TYPE (treeop0
);
11222 gcc_assert (!TREE_ADDRESSABLE (exp
));
11224 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
11226 = assign_stack_temp_for_type
11227 (TYPE_MODE (inner_type
),
11228 GET_MODE_SIZE (TYPE_MODE (inner_type
)), inner_type
);
11230 emit_move_insn (target
, op0
);
11234 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11235 output type is such that the operand is known to be aligned, indicate
11236 that it is. Otherwise, we need only be concerned about alignment for
11237 non-BLKmode results. */
11240 enum insn_code icode
;
11242 if (modifier
!= EXPAND_WRITE
11243 && modifier
!= EXPAND_MEMORY
11244 && !inner_reference_p
11246 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode
))
11248 /* If the target does have special handling for unaligned
11249 loads of mode then use them. */
11250 if ((icode
= optab_handler (movmisalign_optab
, mode
))
11251 != CODE_FOR_nothing
)
11255 op0
= adjust_address (op0
, mode
, 0);
11256 /* We've already validated the memory, and we're creating a
11257 new pseudo destination. The predicates really can't
11259 reg
= gen_reg_rtx (mode
);
11261 /* Nor can the insn generator. */
11262 rtx_insn
*insn
= GEN_FCN (icode
) (reg
, op0
);
11266 else if (STRICT_ALIGNMENT
)
11268 poly_uint64 mode_size
= GET_MODE_SIZE (mode
);
11269 poly_uint64 temp_size
= mode_size
;
11270 if (GET_MODE (op0
) != BLKmode
)
11271 temp_size
= upper_bound (temp_size
,
11272 GET_MODE_SIZE (GET_MODE (op0
)));
11274 = assign_stack_temp_for_type (mode
, temp_size
, type
);
11275 rtx new_with_op0_mode
11276 = adjust_address (new_rtx
, GET_MODE (op0
), 0);
11278 gcc_assert (!TREE_ADDRESSABLE (exp
));
11280 if (GET_MODE (op0
) == BLKmode
)
11282 rtx size_rtx
= gen_int_mode (mode_size
, Pmode
);
11283 emit_block_move (new_with_op0_mode
, op0
, size_rtx
,
11284 (modifier
== EXPAND_STACK_PARM
11285 ? BLOCK_OP_CALL_PARM
11286 : BLOCK_OP_NORMAL
));
11289 emit_move_insn (new_with_op0_mode
, op0
);
11295 op0
= adjust_address (op0
, mode
, 0);
11302 tree lhs
= treeop0
;
11303 tree rhs
= treeop1
;
11304 gcc_assert (ignore
);
11306 /* Check for |= or &= of a bitfield of size one into another bitfield
11307 of size 1. In this case, (unless we need the result of the
11308 assignment) we can do this more efficiently with a
11309 test followed by an assignment, if necessary.
11311 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11312 things change so we do, this code should be enhanced to
11314 if (TREE_CODE (lhs
) == COMPONENT_REF
11315 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
11316 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
11317 && TREE_OPERAND (rhs
, 0) == lhs
11318 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
11319 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
11320 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
11322 rtx_code_label
*label
= gen_label_rtx ();
11323 int value
= TREE_CODE (rhs
) == BIT_IOR_EXPR
;
11324 profile_probability prob
= profile_probability::uninitialized ();
11326 jumpifnot (TREE_OPERAND (rhs
, 1), label
, prob
);
11328 jumpif (TREE_OPERAND (rhs
, 1), label
, prob
);
11329 expand_assignment (lhs
, build_int_cst (TREE_TYPE (rhs
), value
),
11331 do_pending_stack_adjust ();
11332 emit_label (label
);
11336 expand_assignment (lhs
, rhs
, false);
11341 return expand_expr_addr_expr (exp
, target
, tmode
, modifier
);
11343 case REALPART_EXPR
:
11344 op0
= expand_normal (treeop0
);
11345 return read_complex_part (op0
, false);
11347 case IMAGPART_EXPR
:
11348 op0
= expand_normal (treeop0
);
11349 return read_complex_part (op0
, true);
11356 /* Expanded in cfgexpand.c. */
11357 gcc_unreachable ();
11359 case TRY_CATCH_EXPR
:
11361 case EH_FILTER_EXPR
:
11362 case TRY_FINALLY_EXPR
:
11364 /* Lowered by tree-eh.c. */
11365 gcc_unreachable ();
11367 case WITH_CLEANUP_EXPR
:
11368 case CLEANUP_POINT_EXPR
:
11370 case CASE_LABEL_EXPR
:
11375 case COMPOUND_EXPR
:
11376 case PREINCREMENT_EXPR
:
11377 case PREDECREMENT_EXPR
:
11378 case POSTINCREMENT_EXPR
:
11379 case POSTDECREMENT_EXPR
:
11382 case COMPOUND_LITERAL_EXPR
:
11383 /* Lowered by gimplify.c. */
11384 gcc_unreachable ();
11387 /* Function descriptors are not valid except for as
11388 initialization constants, and should not be expanded. */
11389 gcc_unreachable ();
11391 case WITH_SIZE_EXPR
:
11392 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11393 have pulled out the size to use in whatever context it needed. */
11394 return expand_expr_real (treeop0
, original_target
, tmode
,
11395 modifier
, alt_rtl
, inner_reference_p
);
11398 return expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  poly_int64 const_exp;
  if (poly_int_rtx_p (exp, &const_exp))
    {
      tree t = build_int_cst_type (type, const_exp);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
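/* Example of the reduction above: truncating a QImode value to a signed
   3-bit type shifts left by 8 - 3 = 5 and arithmetically shifts right by 5
   again, so bit 2 becomes the sign bit; the unsigned variant instead ANDs
   with the mask wi::mask (3, false, 8) == 0x07.  */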
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
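/* Such aligning offsets typically come from source-level code of the form
   p + (-(intptr_t) p & (ALIGN - 1)): the BIT_AND_EXPR mask must be one less
   than a power of two and larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT,
   and the negated operand must be the address of EXP itself.  */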
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the (possibly
   non-constant) offset in bytes within the string that ARG is accessing.
   If MEM_SIZE is non-zero the storage size of the memory is returned.
   If DECL is non-zero the constant declaration is returned if available.  */

tree
string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
{
  tree dummy = NULL_TREE;
11484 /* Store the type of the original expression before conversions
11485 via NOP_EXPR or POINTER_PLUS_EXPR to other types have been
11487 tree argtype
= TREE_TYPE (arg
);
11492 /* Non-constant index into the character array in an ARRAY_REF
11493 expression or null. */
11494 tree varidx
= NULL_TREE
;
11496 poly_int64 base_off
= 0;
11498 if (TREE_CODE (arg
) == ADDR_EXPR
)
11500 arg
= TREE_OPERAND (arg
, 0);
11502 if (TREE_CODE (arg
) == ARRAY_REF
)
11504 tree idx
= TREE_OPERAND (arg
, 1);
11505 if (TREE_CODE (idx
) != INTEGER_CST
)
11507 /* From a pointer (but not array) argument extract the variable
11508 index to prevent get_addr_base_and_unit_offset() from failing
11509 due to it. Use it later to compute the non-constant offset
11510 into the string and return it to the caller. */
11512 ref
= TREE_OPERAND (arg
, 0);
11514 if (TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
)
11517 if (!integer_zerop (array_ref_low_bound (arg
)))
11520 if (!integer_onep (array_ref_element_size (arg
)))
11524 array
= get_addr_base_and_unit_offset (ref
, &base_off
);
11526 || (TREE_CODE (array
) != VAR_DECL
11527 && TREE_CODE (array
) != CONST_DECL
11528 && TREE_CODE (array
) != STRING_CST
))
11531 else if (TREE_CODE (arg
) == PLUS_EXPR
|| TREE_CODE (arg
) == POINTER_PLUS_EXPR
)
11533 tree arg0
= TREE_OPERAND (arg
, 0);
11534 tree arg1
= TREE_OPERAND (arg
, 1);
11537 tree str
= string_constant (arg0
, &offset
, mem_size
, decl
);
11540 str
= string_constant (arg1
, &offset
, mem_size
, decl
);
11546 /* Avoid pointers to arrays (see bug 86622). */
11547 if (POINTER_TYPE_P (TREE_TYPE (arg
))
11548 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == ARRAY_TYPE
11549 && !(decl
&& !*decl
)
11550 && !(decl
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl
))
11551 && tree_fits_uhwi_p (*mem_size
)
11552 && tree_int_cst_equal (*mem_size
, DECL_SIZE_UNIT (*decl
))))
11555 tree type
= TREE_TYPE (offset
);
11556 arg1
= fold_convert (type
, arg1
);
11557 *ptr_offset
= fold_build2 (PLUS_EXPR
, type
, offset
, arg1
);
11562 else if (TREE_CODE (arg
) == SSA_NAME
)
11564 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
11565 if (!is_gimple_assign (stmt
))
11568 tree rhs1
= gimple_assign_rhs1 (stmt
);
11569 tree_code code
= gimple_assign_rhs_code (stmt
);
11570 if (code
== ADDR_EXPR
)
11571 return string_constant (rhs1
, ptr_offset
, mem_size
, decl
);
11572 else if (code
!= POINTER_PLUS_EXPR
)
11576 if (tree str
= string_constant (rhs1
, &offset
, mem_size
, decl
))
11578 /* Avoid pointers to arrays (see bug 86622). */
11579 if (POINTER_TYPE_P (TREE_TYPE (rhs1
))
11580 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1
))) == ARRAY_TYPE
11581 && !(decl
&& !*decl
)
11582 && !(decl
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl
))
11583 && tree_fits_uhwi_p (*mem_size
)
11584 && tree_int_cst_equal (*mem_size
, DECL_SIZE_UNIT (*decl
))))
11587 tree rhs2
= gimple_assign_rhs2 (stmt
);
11588 tree type
= TREE_TYPE (offset
);
11589 rhs2
= fold_convert (type
, rhs2
);
11590 *ptr_offset
= fold_build2 (PLUS_EXPR
, type
, offset
, rhs2
);
11595 else if (DECL_P (arg
))
11600 tree offset
= wide_int_to_tree (sizetype
, base_off
);
11603 if (TREE_CODE (TREE_TYPE (array
)) != ARRAY_TYPE
)
11606 gcc_assert (TREE_CODE (arg
) == ARRAY_REF
);
11607 tree chartype
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg
, 0)));
11608 if (TREE_CODE (chartype
) != INTEGER_TYPE
)
11611 offset
= fold_convert (sizetype
, varidx
);
11614 if (TREE_CODE (array
) == STRING_CST
)
11616 *ptr_offset
= fold_convert (sizetype
, offset
);
11617 *mem_size
= TYPE_SIZE_UNIT (TREE_TYPE (array
));
11620 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array
)))
11621 >= TREE_STRING_LENGTH (array
));
11625 if (!VAR_P (array
) && TREE_CODE (array
) != CONST_DECL
)
11628 tree init
= ctor_for_folding (array
);
11630 /* Handle variables initialized with string literals. */
11631 if (!init
|| init
== error_mark_node
)
11633 if (TREE_CODE (init
) == CONSTRUCTOR
)
11635 /* Convert the 64-bit constant offset to a wider type to avoid
11638 if (!base_off
.is_constant (&wioff
))
11641 wioff
*= BITS_PER_UNIT
;
11642 if (!wi::fits_uhwi_p (wioff
))
11645 base_off
= wioff
.to_uhwi ();
11646 unsigned HOST_WIDE_INT fieldoff
= 0;
11647 init
= fold_ctor_reference (TREE_TYPE (arg
), init
, base_off
, 0, array
,
11649 HOST_WIDE_INT cstoff
;
11650 if (!base_off
.is_constant (&cstoff
))
11653 cstoff
= (cstoff
- fieldoff
) / BITS_PER_UNIT
;
11654 tree off
= build_int_cst (sizetype
, cstoff
);
11656 offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (offset
), offset
, off
);
11664 *ptr_offset
= offset
;
11666 tree inittype
= TREE_TYPE (init
);
11668 if (TREE_CODE (init
) == INTEGER_CST
11669 && (TREE_CODE (TREE_TYPE (array
)) == INTEGER_TYPE
11670 || TYPE_MAIN_VARIANT (inittype
) == char_type_node
))
11672 /* For a reference to (address of) a single constant character,
11673 store the native representation of the character in CHARBUF.
11674 If the reference is to an element of an array or a member
11675 of a struct, only consider narrow characters until ctors
11676 for wide character arrays are transformed to STRING_CSTs
11677 like those for narrow arrays. */
11678 unsigned char charbuf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
11679 int len
= native_encode_expr (init
, charbuf
, sizeof charbuf
, 0);
11682 /* Construct a string literal with elements of INITTYPE and
11683 the representation above. Then strip
11684 the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST. */
11685 init
= build_string_literal (len
, (char *)charbuf
, inittype
);
11686 init
= TREE_OPERAND (TREE_OPERAND (init
, 0), 0);
11690 tree initsize
= TYPE_SIZE_UNIT (inittype
);
11692 if (TREE_CODE (init
) == CONSTRUCTOR
&& initializer_zerop (init
))
11694 /* Fold an empty/zero constructor for an implicitly initialized
11695 object or subobject into the empty string. */
11697 /* Determine the character type from that of the original
11699 tree chartype
= argtype
;
11700 if (POINTER_TYPE_P (chartype
))
11701 chartype
= TREE_TYPE (chartype
);
11702 while (TREE_CODE (chartype
) == ARRAY_TYPE
)
11703 chartype
= TREE_TYPE (chartype
);
11704 /* Convert a char array to an empty STRING_CST having an array
11705 of the expected type. */
11707 initsize
= integer_zero_node
;
11709 unsigned HOST_WIDE_INT size
= tree_to_uhwi (initsize
);
11710 init
= build_string_literal (size
? 1 : 0, "", chartype
, size
);
11711 init
= TREE_OPERAND (init
, 0);
11712 init
= TREE_OPERAND (init
, 0);
11714 *ptr_offset
= integer_zero_node
;
11720 if (TREE_CODE (init
) != STRING_CST
)
11723 *mem_size
= initsize
;
11725 gcc_checking_assert (tree_to_shwi (initsize
) >= TREE_STRING_LENGTH (init
));
/* Compute the modular multiplicative inverse of A modulo M
   using extended Euclid's algorithm.  Assumes A and M are coprime.  */
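/* For instance, with a precision of 8 bits the inverse of 3 modulo 1<<8
   is 171, since 3 * 171 == 513 == 2 * 256 + 1.  */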
static wide_int
mod_inv (const wide_int &a, const wide_int &b)
{
  /* Verify the assumption.  */
  gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));

  unsigned int p = a.get_precision () + 1;
  gcc_checking_assert (b.get_precision () + 1 == p);
  wide_int c = wide_int::from (a, p, UNSIGNED);
  wide_int d = wide_int::from (b, p, UNSIGNED);
  wide_int x0 = wide_int::from (0, p, UNSIGNED);
  wide_int x1 = wide_int::from (1, p, UNSIGNED);

  if (wi::eq_p (b, 1))
    return wide_int::from (1, p, UNSIGNED);

  while (wi::gt_p (c, 1, UNSIGNED))
      wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
      x0 = wi::sub (x1, wi::mul (q, x0));

  if (wi::lt_p (x1, 0, SIGNED))
/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
   is non-zero and C3 ((1<<(prec-1)) | (C1 - 1)):
   for C2 > 0 to x & C3 == C2
   for C2 < 0 to x & C3 == (C2 & C3).  */
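/* E.g. for a 32-bit signed x, "x % 16 == 3" becomes
   "(x & 0x8000000f) == 3"; a negative x keeps the sign bit set in the
   masked value, so it can never equal the positive constant.  */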
11767 maybe_optimize_pow2p_mod_cmp (enum tree_code code
, tree
*arg0
, tree
*arg1
)
11769 gimple
*stmt
= get_def_for_expr (*arg0
, TRUNC_MOD_EXPR
);
11770 tree treeop0
= gimple_assign_rhs1 (stmt
);
11771 tree treeop1
= gimple_assign_rhs2 (stmt
);
11772 tree type
= TREE_TYPE (*arg0
);
11773 scalar_int_mode mode
;
11774 if (!is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
))
11776 if (GET_MODE_BITSIZE (mode
) != TYPE_PRECISION (type
)
11777 || TYPE_PRECISION (type
) <= 1
11778 || TYPE_UNSIGNED (type
)
11779 /* Signed x % c == 0 should have been optimized into unsigned modulo
11781 || integer_zerop (*arg1
)
11782 /* If c is known to be non-negative, modulo will be expanded as unsigned
11784 || get_range_pos_neg (treeop0
) == 1)
11787 /* x % c == d where d < 0 && d <= -c should be always false. */
11788 if (tree_int_cst_sgn (*arg1
) == -1
11789 && -wi::to_widest (treeop1
) >= wi::to_widest (*arg1
))
11792 int prec
= TYPE_PRECISION (type
);
11793 wide_int w
= wi::to_wide (treeop1
) - 1;
11794 w
|= wi::shifted_mask (0, prec
- 1, true, prec
);
11795 tree c3
= wide_int_to_tree (type
, w
);
11797 if (tree_int_cst_sgn (*arg1
) == -1)
11798 c4
= wide_int_to_tree (type
, w
& wi::to_wide (*arg1
));
11800 rtx op0
= expand_normal (treeop0
);
11801 treeop0
= make_tree (TREE_TYPE (treeop0
), op0
);
11803 bool speed_p
= optimize_insn_for_speed_p ();
11805 do_pending_stack_adjust ();
11807 location_t loc
= gimple_location (stmt
);
11808 struct separate_ops ops
;
11809 ops
.code
= TRUNC_MOD_EXPR
;
11810 ops
.location
= loc
;
11811 ops
.type
= TREE_TYPE (treeop0
);
11814 ops
.op2
= NULL_TREE
;
11816 rtx mor
= expand_expr_real_2 (&ops
, NULL_RTX
, TYPE_MODE (ops
.type
),
11818 rtx_insn
*moinsns
= get_insns ();
11821 unsigned mocost
= seq_cost (moinsns
, speed_p
);
11822 mocost
+= rtx_cost (mor
, mode
, EQ
, 0, speed_p
);
11823 mocost
+= rtx_cost (expand_normal (*arg1
), mode
, EQ
, 1, speed_p
);
11825 ops
.code
= BIT_AND_EXPR
;
11826 ops
.location
= loc
;
11827 ops
.type
= TREE_TYPE (treeop0
);
11830 ops
.op2
= NULL_TREE
;
11832 rtx mur
= expand_expr_real_2 (&ops
, NULL_RTX
, TYPE_MODE (ops
.type
),
11834 rtx_insn
*muinsns
= get_insns ();
11837 unsigned mucost
= seq_cost (muinsns
, speed_p
);
11838 mucost
+= rtx_cost (mur
, mode
, EQ
, 0, speed_p
);
11839 mucost
+= rtx_cost (expand_normal (c4
), mode
, EQ
, 1, speed_p
);
11841 if (mocost
<= mucost
)
11843 emit_insn (moinsns
);
11844 *arg0
= make_tree (TREE_TYPE (*arg0
), mor
);
11848 emit_insn (muinsns
);
11849 *arg0
= make_tree (TREE_TYPE (*arg0
), mur
);
/* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
   If C1 is odd to:
   (X - C2) * C3 <= C4 (or >), where
   C3 is modular multiplicative inverse of C1 and 1<<prec and
   C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
   if C2 > ((1<<prec) - 1) % C1).
   If C1 is even, S = ctz (C1) and C2 is 0, use
   ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
   inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.

   For signed (X % C1) == 0 if C1 is odd to (all operations in it
   unsigned):
   (X * C3) + C4 <= 2 * C4, where
   C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
   C4 is ((1<<(prec - 1) - 1) / C1).
   If C1 is even, S = ctz(C1), use
   ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
   where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
   and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).

   See the Hacker's Delight book, section 10-17.  */
enum tree_code
maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
  gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
  gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);

  if (optimize < 2)
    return code;

  gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
  if (stmt == NULL)
    return code;

  tree treeop0 = gimple_assign_rhs1 (stmt);
  tree treeop1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (treeop0) != SSA_NAME
      || TREE_CODE (treeop1) != INTEGER_CST
      /* Don't optimize the undefined behavior case x % 0;
         x % 1 should have been optimized into zero, punt if
         it makes it here for whatever reason;
         x % -c should have been optimized into x % c.  */
      || compare_tree_int (treeop1, 2) <= 0
      /* Likewise x % c == d where d >= c should be always false.  */
      || tree_int_cst_le (treeop1, *arg1))
    return code;

  /* Unsigned x % pow2 is handled right already, for signed
     modulo handle it in maybe_optimize_pow2p_mod_cmp.  */
  if (integer_pow2p (treeop1))
    return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);

  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1)
    return code;

  signop sgn = UNSIGNED;
  /* If both operands are known to have the sign bit clear, handle
     even the signed modulo case as unsigned.  treeop1 is always
     positive >= 2, checked above.  */
  if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
    sgn = SIGNED;

  if (!TYPE_UNSIGNED (type))
    {
      if (tree_int_cst_sgn (*arg1) == -1)
        return code;
      type = unsigned_type_for (type);
      if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
        return code;
    }

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1);
  int shift = wi::ctz (w);
  /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
     C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
     If C1 is odd, we can handle all cases by subtracting
     C4 below.  We could handle even the even C1 and C2 > -1U % C1 cases
     e.g. by testing for overflow on the subtraction, punt on that for now
     though.  */
  if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
    {
      if (sgn == SIGNED)
        return code;
      wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
      if (wi::gtu_p (wi::to_wide (*arg1), x))
        return code;
    }

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Punt if treeop0 is used in the same bb in a division
         or another modulo with the same divisor.  We should expect
         the division and modulo combined together.  */
      if (use_stmt == stmt
          || gimple_bb (use_stmt) != gimple_bb (stmt))
        continue;
      if (!is_gimple_assign (use_stmt)
          || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
              && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
        continue;
      if (gimple_assign_rhs1 (use_stmt) != treeop0
          || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
        continue;
      return code;
    }

  w = wi::lrshift (w, shift);
  wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
  wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
  wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
  tree c3 = wide_int_to_tree (type, m);
  tree c5 = NULL_TREE;
  wide_int d, e;
  if (sgn == UNSIGNED)
    {
      d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
      /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
         otherwise use < or subtract one from C4.  E.g. for
         x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
         x % 3U == 1 already needs to be
         (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
      if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
        d -= 1;
      if (shift)
        d = wi::lrshift (d, shift);
    }
  else
    {
      e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
      if (!shift)
        d = wi::lshift (e, 1);
      else
        {
          e = wi::bit_and (e, wi::mask (shift, true, prec));
          d = wi::lrshift (e, shift - 1);
        }
      c5 = wide_int_to_tree (type, e);
    }
  tree c4 = wide_int_to_tree (type, d);

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
                                EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  tree t = fold_convert_loc (loc, type, treeop0);
  if (!integer_zerop (*arg1))
    t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
  t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
  if (sgn == SIGNED)
    t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
  if (shift)
    {
      tree s = build_int_cst (NULL_TREE, shift);
      t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
    }

  start_sequence ();
  rtx mur = expand_normal (t);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, LE, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (type, mur);
  *arg1 = c4;
  return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
}
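/* A minimal executable sketch of the even-divisor unsigned case handled by
   maybe_optimize_mod_cmp above (illustrative only; the helper name and the
   constants are examples computed for prec == 32 and C1 == 6, assuming a
   32-bit unsigned int, and are not used anywhere in GCC).  Here S == 1,
   C3 == 0xaaaaaaab (the multiplicative inverse of 3 modulo 1<<32) and
   C4 == (0xffffffff / 3) >> 1 == 0x2aaaaaaa.  */
static inline bool
example_umod6_eq0 (unsigned int x)
{
  unsigned int t = x * 0xaaaaaaabu;
  /* Rotate right by S == 1.  */
  t = (t >> 1) | (t << 31);
  /* Equivalent to x % 6u == 0.  */
  return t <= 0x2aaaaaaau;
}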
/* Generate code to calculate OPS, an exploded expression
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((POINTER_TYPE_P (TREE_TYPE (arg0))
           && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
          || (POINTER_TYPE_P (TREE_TYPE (arg1))
              && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
          && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
        return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
        {
          tree if_true = constant_boolean_node (true, ops->type);
          tree if_false = constant_boolean_node (false, ops->type);
          return expand_vec_cond_expr (ops->type, ifexp, if_true,
                                       if_false, target);
        }
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
      && TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
      if (code != ops->code)
        {
          struct separate_ops nops = *ops;
          nops.code = ops->code = code;
          nops.op0 = arg0;
          nops.op1 = arg1;
          nops.type = TREE_TYPE (arg0);
          return do_store_flag (&nops, target, mode);
        }
    }
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
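  /* For instance (illustration of the conversions done in the switch below):
     x < 1 becomes x <= 0 and x >= 1 becomes x > 0, while for signed
     operands x <= -1 becomes x < 0 and x > -1 becomes x >= 0.  */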
  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
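  /* For example (illustrative only): (x & 8) != 0 is expanded as
     (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1.  */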
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            profile_probability default_probability)
{
  class expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
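  /* For instance (illustration only): with a case range of 5 .. 12, RANGE
     is 7; an original value of 3 becomes (unsigned) -2 after the
     subtraction and an original value of 20 becomes 15, and both compare
     above 7, so a single unsigned comparison rejects values on either side
     of the range.  */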
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    {
      unsigned int width;

      /* We know the value of INDEX is between 0 and RANGE.  If we have a
         sign-extended subreg, and RANGE does not have the sign bit set, then
         we have a value that is valid for both sign and zero extension.  In
         this case, we get better code if we sign extend.  */
      if (GET_CODE (index) == SUBREG
          && SUBREG_PROMOTED_VAR_P (index)
          && SUBREG_PROMOTED_SIGNED_P (index)
          && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
              <= HOST_BITS_PER_WIDE_INT)
          && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
        index = convert_to_mode (Pmode, index, 0);
      else
        index = convert_to_mode (Pmode, index, 1);
    }

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label,
               profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
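/* For example (illustrative only): a four-element boolean VECTOR_CST
   {0, 1, 0, 1} is returned as a CONST_VECTOR whose elements are
   {0, -1, 0, -1} in the inner mode, i.e. all-zeros or all-ones.  */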
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
                              VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
        builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
               || integer_minus_onep (elt))
        builder.quick_push (CONSTM1_RTX (inner));
      else
        gcc_unreachable ();
    }

  return builder.build ();
}
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
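/* For example (illustrative only): for a four-element mask {-1, 0, -1, -1}
   the result has bits 0, 2 and 3 set, i.e. the value 0b1101.  */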
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;

  /* The result has a fixed number of bits so the input must too.  */
  unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
  for (unsigned int i = 0; i < nunits; ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
        res = wi::set_bit (res, i);
      else
        gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
                              VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
        builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
                                                          inner));
      else if (TREE_CODE (elt) == FIXED_CST)
        builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                          inner));
      else
        builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
                                                  inner));
    }
  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
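/* For instance (illustrative): the C++ front end passes the prefix "gxx",
   so with DWARF unwinding the resulting decl names the familiar
   "__gxx_personality_v0" routine, and with SJLJ exceptions it names
   "__gxx_personality_sj0".  */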
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */
rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */
static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */
rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}