/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "gimple-fold.h"
#include "rtx-vector-builder.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_cpymem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT, unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
			machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
			  profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (!targetm.hard_regno_mode_ok (regno, mode))
	      continue;

	    set_mode_and_regno (reg, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
	  >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    {
      from = gen_lowpart (to_int_mode, SUBREG_REG (from));
      from_mode = to_int_mode;
    }

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
			    GET_MODE_BITSIZE (to_mode)));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}
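/* Usage sketch (illustrative, not from the original sources): a caller
   that wants to widen a QImode value SRC into an SImode pseudo can write

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);   /- unsignedp == 1 requests zero-extension

   convert_move then picks a direct extension insn, a multiword expansion,
   or a libcall as needed; the names DST and SRC here are hypothetical.  */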
/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
	!= CODE_FOR_nothing)
      {
	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
			to, from, UNKNOWN);
	return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an isolated
	 register when maybe so.  Likewise for any MEM input, since the
	 conversion sequence might require several references to it and we
	 must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
					    LT, lowfrom, const0_rtx,
					    lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  scalar_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  opt_scalar_mode intermediate_iter;
	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
	    {
	      scalar_mode intermediate = intermediate_iter.require ();
	      if (((can_extend_p (to_mode, intermediate, unsignedp)
		    != CODE_FOR_nothing)
		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
							 intermediate)))
		  && (can_extend_p (intermediate, from_mode, unsignedp)
		      != CODE_FOR_nothing))
		{
		  convert_move (to, convert_to_mode (intermediate, from,
						     unsignedp), unsignedp);
		  return;
		}
	    }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
	  >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
	 much to do with respect to canonicalization.  We have to
	 assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
	oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
				   GET_MODE_PRECISION (int_mode),
				   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
	  || CONST_POLY_INT_P (x)
	  || (REG_P (x)
	      && (!HARD_REGISTER_P (x)
		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
	      && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
   return gen_lowpart (int_mode, x);

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
			    GET_MODE_BITSIZE (oldmode)));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
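/* Illustrative sketch (hypothetical values, not from the original code):
   converting a constant is folded immediately instead of emitting insns,
   e.g.

     rtx c = convert_modes (QImode, SImode, GEN_INT (0x17f), 1);

   yields (const_int 127), because the value is canonicalized to QImode's
   8-bit precision through wide_int::from above.  */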
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	{
	  tmode = mode_iter.require ();
	  if (GET_MODE_SIZE (tmode) > max_pieces
	      || targetm.slow_unaligned_access (tmode, align))
	    break;
	  xmode = tmode;
	}

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
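/* For example, on a target whose integer modes are QI/HI/SI/DI,
   widest_int_mode_for_size (8) returns SImode: DImode is exactly 8 bytes
   and therefore not strictly narrower than SIZE.  (Illustrative only;
   the answer depends on the target's mode sizes.)  */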
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
		  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
						 optimize_insn_for_speed_p ());
}
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	{
	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
	  l %= modesize;
	  switch (op)
	    {
	    case CLEAR_BY_PIECES:
	    case MOVE_BY_PIECES:
	    case SET_BY_PIECES:
	    case STORE_BY_PIECES:
	      n_insns += n_pieces;
	      break;

	    case COMPARE_BY_PIECES:
	      {
		int batch = targetm.compare_by_pieces_branch_ratio (mode);
		int batch_ops = 4 * batch - 1;
		unsigned HOST_WIDE_INT full = n_pieces / batch;
		n_insns += full * batch_ops;
		if (n_pieces % batch != 0)
		  n_insns += batch_ops;
		break;
	      }

	    default:
	      gcc_unreachable ();
	    }
	}
      max_size = modesize;
    }

  gcc_assert (!l);
  return n_insns;
}
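/* Worked example of the COMPARE_BY_PIECES accounting above, with
   hypothetical numbers: for n_pieces == 7 and a branch ratio batch == 4,
   batch_ops == 4 * 4 - 1 == 15 and full == 7 / 4 == 1, so n_insns gains
   15 insns for the one full batch plus another 15 because the
   7 % 4 == 3 leftover pieces are costed as one more (partial) batch.  */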
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;
  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;
  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;
  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;
  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;
  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;
public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
			  void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
	m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
	 implementing the memory operation has never handled them.
	 Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
	{
	  m_auto = true;
	  if (STACK_GROWS_DOWNWARD)
	    m_addr_inc = -1;
	  else
	    m_addr_inc = 1;
	}
      else
	gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
			     HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
		     ? USE_LOAD_PRE_DECREMENT (mode)
		     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
		      ? USE_LOAD_POST_INCREMENT (mode)
		      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
				 plus_constant (addr_mode,
						m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}

/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   already).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overriden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
		  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
				rtx from, bool from_load,
				by_pieces_constfn from_cfn,
				void *from_cfn_data,
				unsigned HOST_WIDE_INT len,
				unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
	       from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
	{
	  unsigned int size = GET_MODE_SIZE (mode);
	  rtx to1 = NULL_RTX, from1;

	  while (m_len >= size)
	    {
	      if (m_reverse)
		m_offset -= size;

	      to1 = m_to.adjust (mode, m_offset);
	      from1 = m_from.adjust (mode, m_offset);

	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
	      m_from.maybe_predec (-(HOST_WIDE_INT)size);

	      generate (to1, from1, mode);

	      m_to.maybe_postinc (size);
	      m_from.maybe_postinc (size);

	      if (!m_reverse)
		m_offset += size;

	      m_len -= size;
	    }

	  finish_mode (mode);
	}

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
1137 class move_by_pieces_d
: public op_by_pieces_d
1139 insn_gen_fn m_gen_fun
;
1140 void generate (rtx
, rtx
, machine_mode
);
1141 bool prepare_mode (machine_mode
, unsigned int);
1144 move_by_pieces_d (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1146 : op_by_pieces_d (to
, false, from
, true, NULL
, NULL
, len
, align
)
1149 rtx
finish_retmode (memop_ret
);
1152 /* Return true if MODE can be used for a set of copies, given an
1153 alignment ALIGN. Prepare whatever data is necessary for later
1154 calls to generate. */
1157 move_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1159 insn_code icode
= optab_handler (mov_optab
, mode
);
1160 m_gen_fun
= GEN_FCN (icode
);
1161 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1164 /* A callback used when iterating for a compare_by_pieces_operation.
1165 OP0 and OP1 are the values that have been loaded and should be
1166 compared in MODE. If OP0 is NULL, this means we should generate a
1167 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1168 gen function that should be used to generate the mode. */
1171 move_by_pieces_d::generate (rtx op0
, rtx op1
,
1172 machine_mode mode ATTRIBUTE_UNUSED
)
1174 #ifdef PUSH_ROUNDING
1175 if (op0
== NULL_RTX
)
1177 emit_single_push_insn (mode
, op1
, NULL
);
1181 emit_insn (m_gen_fun (op0
, op1
));
1184 /* Perform the final adjustment at the end of a string to obtain the
1185 correct return value for the block operation.
1186 Return value is based on RETMODE argument. */
1189 move_by_pieces_d::finish_retmode (memop_ret retmode
)
1191 gcc_assert (!m_reverse
);
1192 if (retmode
== RETURN_END_MINUS_ONE
)
1194 m_to
.maybe_postinc (-1);
1197 return m_to
.adjust (QImode
, m_offset
);
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Return value is based on RETMODE argument.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, memop_ret retmode)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}
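/* Minimal usage sketch (assuming X and Y are BLKmode MEMs and ALIGN their
   known common alignment; not from the original sources):

     if (can_move_by_pieces (16, align))
       move_by_pieces (x, y, 16, align, RETURN_BEGIN);

   RETURN_BEGIN asks for no end-address result; emit_block_move_hints
   below uses exactly this pattern before falling back to a cpymem
   pattern or a libcall.  */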
/* Derived class from op_by_pieces_d, providing support for block store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
		     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};
/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}

/* A callback used when iterating for a store_by_pieces operation.
   OP0 is the destination and OP1 the constant value to be stored in
   MODE; M_GEN_FUN is the insn gen function used to generate the
   store.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}

/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  scalar_int_mode mode = widest_int_mode_for_size (max_size);

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		 void *constfundata, unsigned int align, bool memsetp,
		 memop_ret retmode)
{
  if (len == 0)
    {
      gcc_assert (retmode != RETURN_END_MINUS_ONE);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}
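/* Sketch of a CONSTFUN callback (hypothetical; builtins.c supplies the
   real ones, such as the memcpy string readers):

     static rtx
     all_ones_cst (void *, HOST_WIDE_INT, scalar_int_mode mode)
     {
       /- Every piece, at any offset, is all-one bits in MODE.
       return gen_int_mode (-1, mode);
     }

   A caller would then check can_store_by_pieces (len, all_ones_cst, NULL,
   align, true) before calling store_by_pieces with the same arguments and
   RETURN_BEGIN.  The callback name is illustrative only.  */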
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);
 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
		       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
			       true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
			     true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
	return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
			   m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}
/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
			     NULL_RTX, NULL, m_fail_label,
			     profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
		   rtx target, unsigned int align,
		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
			    fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);
  emit_barrier ();
  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size,
		       unsigned HOST_WIDE_INT min_size,
		       unsigned HOST_WIDE_INT max_size,
		       unsigned HOST_WIDE_INT probable_max_size,
		       bool bail_out_libcall, bool *is_move_done)
{
  int may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (is_move_done)
    *is_move_done = true;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = 1;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = 0;
      break;

    case BLOCK_OP_NO_LIBCALL_RET:
      may_use_call = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  poly_int64 const_size;
  if (poly_int_rtx_p (size, &const_size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, const_size);
      set_mem_size (y, const_size);
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
  else if (emit_block_move_via_cpymem (x, y, size, align,
				       expected_align, expected_size,
				       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      if (bail_out_libcall)
	{
	  if (is_move_done)
	    *is_move_done = false;
	  return retval;
	}

      if (may_use_call < 0)
	return pc_rtx;

      retval = emit_block_copy_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
				min, max, max);
}
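/* Usage sketch for a hypothetical caller: copying LEN bytes between two
   BLKmode MEMs with default strategy selection looks like

     emit_block_move (dst_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM must be used instead when the copy feeds an outgoing
   argument area, and BLOCK_OP_NO_LIBCALL forbids the memcpy fallback.
   DST_MEM, SRC_MEM and LEN here are illustrative names.  */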
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	function_arg_info arg_info (mode, /*named=*/true);
	rtx tmp = targetm.calls.function_arg (args_so_far, arg_info);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, arg_info))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, arg_info);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a cpymem pattern;
   return true if successful.  */

static bool
emit_block_move_via_cpymem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size,
			    unsigned HOST_WIDE_INT min_size,
			    unsigned HOST_WIDE_INT max_size,
			    unsigned HOST_WIDE_INT probable_max_size)
{
  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
	expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  temporary_volatile_ok v (true);

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (cpymem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  class expand_operand ops[9];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label,
			   profile_probability::guessed_always ()
				.apply_scale (9, 10));
}
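/* The RTL emitted above corresponds to this C skeleton (illustrative
   only; the real loop uses an unsigned less-than compare with a 90%
   taken-branch probability):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];        /- one QImode move per iteration
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/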
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
			   rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  /* Since dst and src are passed to a libcall, mark the corresponding
     tree EXPR as addressable.  */
  tree dst_expr = MEM_EXPR (dst);
  tree src_expr = MEM_EXPR (src);
  if (dst_expr)
    mark_addressable (dst_expr);
  if (src_expr)
    mark_addressable (src_expr);

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
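/* For example, the memcpy fallback used by emit_block_move_hints boils
   down to (a sketch, with DST, SRC and SIZE as in that caller):

     rtx ret = emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src,
					  size, /-tailcall=false);

   which expands a genuine call to the user-visible memcpy and returns
   the call's result rtx.  */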
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  class expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
			   unsigned align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
		      bool equality_only, by_pieces_constfn y_cfn,
		      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
				y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
						     GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
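/* Illustrative example: a 16-byte struct returned in two 8-byte hard
   registers might be described by a PARALLEL such as

     (parallel [(expr_list (reg:DI 10) (const_int 0))
                (expr_list (reg:DI 11) (const_int 8))])

   for which gen_group_rtx would produce

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   with fresh pseudos but the same modes and byte offsets.  The
   register numbers here are made up for illustration.  */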
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
                   poly_int64 ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
        {
          src = gen_reg_rtx (imode);
          emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
        }
      else
        {
          src = assign_stack_temp (GET_MODE (orig_src), ssize);
          emit_move_insn (src, orig_src);
        }
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_int64 shift = 0;

      /* Handle trailing fragments that run over the size of the struct.
         It's the target's responsibility to make sure that the fragment
         cannot be strictly smaller in some cases and strictly larger
         in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (maybe_gt (bytelen, 0));
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
          && known_eq (bytelen, GET_MODE_SIZE (mode)))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && known_eq (bytelen, GET_MODE_SIZE (mode)))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
          poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
          unsigned int elt;
          poly_int64 subpos;

          if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
              && known_le (subpos + bytelen, slen0))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, elt);
              if (maybe_ne (subpos, 0)
                  || maybe_ne (subpos + bytelen, slen0)
                  || (!CONSTANT_P (tmps[i])
                      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             subpos * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false,
                                             NULL);
            }
          else
            {
              rtx mem;

              gcc_assert (known_eq (bytepos, 0));
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false,
                                           NULL);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          if (known_eq (bytelen, ssize))
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (known_eq (2 * bytelen, ssize));
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false, NULL);

      if (maybe_ne (shift, 0))
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
                  poly_int64 ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
        {
          dst = gen_reg_rtx (imode);
          emit_group_store (dst, src, type, ssize);
          dst = gen_lowpart (GET_MODE (orig_dst), dst);
        }
      else
        {
          dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
          emit_group_store (dst, src, type, ssize);
        }
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      poly_int64 bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
                        bytepos))
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
                                                          finish - 1), 1)),
                        bytepos))
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_uint64 adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.
         It's the target's responsibility to make sure that the fragment
         cannot be strictly smaller in some cases and strictly larger
         in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (known_le (bytepos + adj_bytelen,
                        GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
            dest = XEXP (dst, 0);
          else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && multiple_p (bytepos * BITS_PER_UNIT,
                              GET_MODE_ALIGNMENT (mode))
               && known_eq (bytelen, GET_MODE_SIZE (mode)))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
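/* Worked example: storing a 12-byte struct (SSIZE == 12) from two
   DImode pieces at byte positions 0 and 8.  For the second piece,
   BYTEPOS + BYTELEN is 16 > 12, so ADJ_BYTELEN becomes 4; on a
   big-endian target (without BLOCK_REG_PADDING) the fragment is first
   shifted right by (8 - (12 - 8)) * BITS_PER_UNIT == 32 bits so that
   store_bit_field picks the live bytes up from the lsb, and only 4
   bytes are written, keeping the store inside the struct.  */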
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

static void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
  fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
  fixed_size_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
        copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode,
                                          false, NULL),
                       false);
    }
}
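/* Worked example: a 6-byte struct on a 32-bit big-endian target that
   returns small aggregates at the least-significant end of registers.
   bytes % UNITS_PER_WORD is 2, so PADDING_CORRECTION is
   32 - 2 * 8 == 16 bits, and the copy loop above starts XBITPOS at 16,
   skipping the undefined high bits of the first source word.  */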
/* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode_in, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
  fixed_size_mode dst_mode;
  scalar_int_mode min_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = arg_int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
  min_mode = smallest_int_mode_for_size (bitsize);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* Find the largest integer mode that can be used to copy all or as
         many bits as possible of the structure if the target supports larger
         copies.  There are too many corner cases here w.r.t. alignments on
         the read/writes.  So if there is any padding just use single byte
         operations.  */
      opt_scalar_int_mode mode_iter;
      if (padding_correction == 0 && !STRICT_ALIGNMENT)
        {
          FOR_EACH_MODE_FROM (mode_iter, min_mode)
            {
              unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
              if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
                  && msize <= BITS_PER_WORD)
                bitsize = msize;
              else
                break;
            }
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          false, NULL),
                       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      mode = mode_iter.require ();
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
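/* Usage sketch: callers typically use these helpers to peek through an
   SSA name at expand time, for example (treeop0 and op0 here are
   hypothetical locals of a caller):

     gimple *def = get_def_for_expr (treeop0, BIT_AND_EXPR);
     if (def)
       op0 = gimple_assign_rhs1 (def);

   i.e. match the defining statement's RHS code before expanding a
   combined pattern for it.  */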
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  poly_int64 size_val;
  if (mode != BLKmode
      && poly_int_rtx_p (size, &size_val)
      && known_eq (size_val, GET_MODE_SIZE (mode)))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
                                                 CLEAR_BY_PIECES,
                                                 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
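/* Illustrative example: clear_storage (mem, GEN_INT (16),
   BLOCK_OP_NORMAL) on a BLKmode MEM typically becomes a couple of
   word-sized zero stores via clear_by_pieces on a 64-bit target; with
   a variable SIZE it falls back to the target's setmem pattern if one
   exists, and otherwise to an ordinary call to memset.  */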
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;
    }

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          class expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
                   false);
}
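/* Illustrative note: for an SCmode value the real part lives at byte
   offset 0 and the imaginary part at GET_MODE_SIZE (SFmode) == 4, so
   write_complex_part (x, val, true) on a MEM stores VAL at offset 4,
   while on a CONCAT it simply writes the second element.  */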
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false, NULL);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
                       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  rtx temp;

  poly_int64 adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
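/* Illustrative example: a DImode push written as
   (mem:DI (pre_dec:P (reg sp))) is resolved by emitting an explicit
   sp = sp - 8 (assuming PUSH_ROUNDING leaves 8 unchanged) and
   returning (mem:DI (reg sp)), so the caller can use an ordinary move
   with no auto-increment side effect.  */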
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  scalar_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  poly_int64 submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && REG_NREGS (x) == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  if (GET_CODE (op) != SUBREG)
    return false;
  machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
  poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
  return (known_ge (offset, GET_MODE_SIZE (innermostmode))
          || known_le (offset, -UNITS_PER_WORD));
}
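/* Worked example: on a little-endian 64-bit target,
   OP = (subreg:TI (reg:DI 100) 0) is a paradoxical subreg.  Word 0
   overlaps the DImode value, but word 1 has offset 8, which is
   >= GET_MODE_SIZE (DImode), so word 1 is entirely undefined and
   emit_move_multi_word can skip it.  The register number is made up
   for illustration.  */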
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i, mode_size;

  /* This function can only handle cases where the number of words is
     known at compile time.  */
  mode_size = GET_MODE_SIZE (mode).to_constant ();
  gcc_assert (mode_size >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y)
      || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
        continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
        return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, poly_int64 extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && known_eq (extra, 0))
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (maybe_ne (extra, 0))
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (maybe_ne (extra, 0) && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      poly_int64 csize;
      if (poly_int_rtx_p (size, &csize))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -csize - (below ? 0 : extra));
      else if (maybe_ne (extra, 0) && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (NARROWEST_INT_MODE, temp);
}
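/* Illustrative example: push_block (GEN_INT (32), 8, 0) on a
   downward-growing stack bumps the stack pointer by 40 bytes via
   anti_adjust_stack; since BELOW is 0 the padding sits at high
   addresses, so the returned address for the 32-byte block is simply
   virtual_outgoing_args_rtx.  */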
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
3972 /* A utility routine used here, in reload, and in try_split. The insns
3973 after PREV up to and including LAST are known to adjust the stack,
3974 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3975 placing notes as appropriate. PREV may be NULL, indicating the
3976 entire insn sequence prior to LAST should be scanned.
3978 The set of allowed stack pointer modifications is small:
3979 (1) One or more auto-inc style memory references (aka pushes),
3980 (2) One or more addition/subtraction with the SP as destination,
3981 (3) A single move insn with the SP as destination,
3982 (4) A call_pop insn,
3983 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3985 Insns in the sequence that do not modify the SP are ignored,
3986 except for noreturn calls.
3988 The return value is the amount of adjustment that can be trivially
3989 verified, via immediate operand or auto-inc. If the adjustment
3990 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
poly_int64
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;

      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      poly_int64 offset;
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
	return offset;
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }

  rtx mem, addr;

  /* Otherwise only think about autoinc patterns.  */
  if (mem_autoinc_base (dest) == stack_pointer_rtx)
    {
      mem = dest;
      gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			   != stack_pointer_rtx);
    }
  else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
    mem = SET_SRC (set);
  else
    return 0;

  addr = XEXP (mem, 0);
  switch (GET_CODE (addr))
    {
    case PRE_INC:
    case POST_INC:
      return GET_MODE_SIZE (GET_MODE (mem));
    case PRE_DEC:
    case POST_DEC:
      return -GET_MODE_SIZE (GET_MODE (mem));
    case PRE_MODIFY:
    case POST_MODIFY:
      addr = XEXP (addr, 1);
      gcc_assert (GET_CODE (addr) == PLUS);
      gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
      return rtx_to_poly_int64 (XEXP (addr, 1));
    default:
      gcc_unreachable ();
    }
}
poly_int64
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
		       poly_int64 end_args_size)
{
  poly_int64 args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
	 a call argument containing a TLS address that itself requires
	 a call to __tls_get_addr.  The handling of stack_pointer_delta
	 in emit_single_push_insn is supposed to ensure that any such
	 notes are already correct.  */
      rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
      gcc_assert (!note || known_eq (args_size, get_args_size (note)));

      poly_int64 this_delta = find_args_size_adjust (insn);
      if (known_eq (this_delta, 0))
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (known_eq (this_delta, HOST_WIDE_INT_MIN))
	saw_unknown = true;

      if (!note)
	add_args_size_note (insn, args_size);
      if (STACK_GROWS_DOWNWARD)
	this_delta = -poly_uint64 (this_delta);

      if (saw_unknown)
	args_size = HOST_WIDE_INT_MIN;
      else
	args_size -= this_delta;
    }

  return args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (known_eq (GET_MODE_SIZE (mode), rounded_size))
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
    {
      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= rounded_size;

      dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  poly_int64 delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  /* Adjust stack_pointer_delta to describe the situation after the push
     we just performed.  Note that we must do this after the push rather
     than before the push in case calculating X needs pushes and pops of
     its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
     for such pushes and pops must not include the effect of the future
     command.  */
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_args_size_note (last, stack_pointer_delta);
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
	      || known_eq (delta, old_delta));
}
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   to not overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, poly_int64 extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  pad_direction stack_direction
    = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;

  /* Decide where to pad the argument: PAD_DOWNWARD for below,
     PAD_UPWARD for above, or PAD_NONE for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != PAD_NONE)
      where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((!targetm.slow_unaligned_access (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
			   align / BITS_PER_UNIT))
	  && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (maybe_ne (extra, 0)
	      && args_addr == 0
	      && where_pad != PAD_NONE
	      && where_pad != stack_direction)
	    anti_adjust_stack (gen_int_mode (extra, Pmode));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
			  RETURN_BEGIN);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  poly_int64 offset;
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
	      extra = 0;
	    }
	  else if (poly_int_rtx_p (args_so_far, &offset))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + offset));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  /* If part should go in registers and pushing to that part would
	     overwrite some of the values that need to go into regs, load the
	     overlapping values into temporary pseudos to be moved into the hard
	     regs at the end after the stack pushing has completed.
	     We cannot load them directly into the hard regs here because
	     they can be clobbered by the block move expansions.  */

	  if (partial > 0 && reg != 0 && mode == BLKmode
	      && GET_CODE (reg) != PARALLEL)
	    {
	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
	      if (overlapping > 0)
		{
		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
		  overlapping /= UNITS_PER_WORD;

		  tmp_regs = XALLOCAVEC (rtx, overlapping);

		  for (int i = 0; i < overlapping; i++)
		    tmp_regs[i] = gen_reg_rtx (word_mode);

		  for (int i = 0; i < overlapping; i++)
		    emit_move_insn (tmp_regs[i],
				    operand_subword_force (target, i, mode));
		}
	      else if (overlapping == -1)
		overlapping = 0;
	      /* Could not determine whether there is overlap.
		 Fail the sibcall.  */
	      else
		{
		  overlapping = 0;
		  if (sibcall_p)
		    return false;
		}
	    }
	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  This case is only supported
	 for fixed-width modes.  */
      int size = GET_MODE_SIZE (mode).to_constant ();
      size /= UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
	  && args_addr == 0
	  && where_pad != PAD_NONE
	  && where_pad != stack_direction)
	anti_adjust_stack (gen_int_mode (extra, Pmode));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  if (!emit_push_insn (operand_subword_force (x, i, mode),
			       word_mode, NULL_TREE, NULL_RTX, align, 0,
			       NULL_RTX, 0, args_addr,
			       GEN_INT (args_offset + ((i - not_stack + skip)
						       * UNITS_PER_WORD)),
			       reg_parm_stack_space, alignment_pad, sibcall_p))
	    return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
	  && args_addr == 0
	  && where_pad != PAD_NONE
	  && where_pad != stack_direction)
	anti_adjust_stack (gen_int_mode (extra, Pmode));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
	  dest = gen_rtx_MEM (mode, memory_address (mode, addr));

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

	  for (int i = 0; i < overlapping; i++)
	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
					 + nregs - overlapping + i),
			    tmp_regs[i]);
	}
    }

  if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_int_mode (extra, Pmode));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (poly_uint64 pbitsize,
				 poly_uint64 pbitpos,
				 poly_uint64 pbitregion_start,
				 poly_uint64 pbitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
  /* str_mode is not guaranteed to be a scalar type.  */
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize;
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
  if (mode1 != VOIDmode
      || !pbitsize.is_constant (&bitsize)
      || !pbitpos.is_constant (&bitpos)
      || !pbitregion_start.is_constant (&bitregion_start)
      || !pbitregion_end.is_constant (&bitregion_end)
      || bitsize >= BITS_PER_WORD
      || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_bitsize = BITS_PER_WORD;

      scalar_int_mode best_mode;
      if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
	return false;
      str_mode = best_mode;
      str_bitsize = GET_MODE_BITSIZE (best_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      else
	binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
	       poly_int64_pod *bitpos, tree *offset)
{
  poly_int64 bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      poly_int64 rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if (!multiple_p (rbitpos, BITS_PER_UNIT))
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  poly_uint64 field_offset, repr_offset;
  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (maybe_gt (bitoffset, *bitpos))
    {
      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);

      *bitpos += adjust_bits;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust_bytes);
      else
	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}
/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;
  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || targetm.slow_unaligned_access (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
	reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
	{
	  class expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
			 false);
      return;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && (REF_REVERSE_STORAGE_ORDER (to)
	      || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      poly_int64 bitsize, bitpos;
      poly_uint64 bitregion_start = 0;
      poly_uint64 bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (maybe_lt (bitpos, 0))
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bits_to_bytes_round_down (bitpos));
	  bitpos = num_trailing_bits (bitpos);
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (maybe_gt (bitsize, 0)
	       && multiple_p (bitsize, BITS_PER_UNIT)
	       && multiple_p (bitpos, BITS_PER_UNIT))
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}
      if (offset != 0)
	{
	  machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    {
	      /* We cannot be sure that the RTL in offset_rtx is valid outside
		 of a memory address context, so force it into a register
		 before attempting to convert it to the desired mode.  */
	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	    }

	  /* If we have an expression in OFFSET_RTX and a non-zero
	     byte offset in BITPOS, adding the byte offset before the
	     OFFSET_RTX results in better intermediate code, which makes
	     later rtl optimization passes perform better.

	     We prefer intermediate code like this:

	     r124:DI=r123:DI+0x18
	     [r124:DI]=r121:DI

	     ... instead of ...

	     r124:DI=r123:DI+0x10
	     [r124:DI+0x8]=r121:DI

	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
	  poly_int64 bytepos;
	  if (mode1 != VOIDmode
	      && maybe_ne (bitpos, 0)
	      && maybe_gt (bitsize, 0)
	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	      && multiple_p (bitpos, bitsize)
	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
	      bitregion_start = 0;
	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  machine_mode to_mode = GET_MODE (to_rtx);
	  gcc_checking_assert (COMPLEX_MODE_P (to_mode));
	  poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
	  unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
	  if (TYPE_MODE (TREE_TYPE (from)) == to_mode
	      && known_eq (bitpos, 0)
	      && known_eq (bitsize, mode_bitsize))
	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
	  else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
		   && known_eq (bitsize, inner_bitsize)
		   && (known_eq (bitpos, 0)
		       || known_eq (bitpos, inner_bitsize)))
	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
				 false, nontemporal, reversep);
	  else if (known_le (bitpos + bitsize, inner_bitsize))
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (known_ge (bitpos, inner_bitsize))
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - inner_bitsize,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
	    {
	      result = expand_normal (from);
	      if (GET_CODE (result) == CONCAT)
		{
		  to_mode = GET_MODE_INNER (to_mode);
		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
		  rtx from_real
		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
					   from_mode, 0);
		  rtx from_imag
		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
					   from_mode, 0);
		  if (!from_real || !from_imag)
		    goto concat_store_slow;
		  emit_move_insn (XEXP (to_rtx, 0), from_real);
		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
		}
	      else
		{
		  rtx from_rtx;
		  if (MEM_P (result))
		    from_rtx = change_address (result, to_mode, NULL_RTX);
		  else
		    from_rtx
		      = simplify_gen_subreg (to_mode, result,
					     TYPE_MODE (TREE_TYPE (from)), 0);
		  if (from_rtx)
		    {
		      emit_move_insn (XEXP (to_rtx, 0),
				      read_complex_part (from_rtx, false));
		      emit_move_insn (XEXP (to_rtx, 1),
				      read_complex_part (from_rtx, true));
		    }
		  else
		    {
		      machine_mode to_mode
			= GET_MODE_INNER (GET_MODE (to_rtx));
		      rtx from_real
			= simplify_gen_subreg (to_mode, result,
					       TYPE_MODE (TREE_TYPE (from)),
					       0);
		      rtx from_imag
			= simplify_gen_subreg (to_mode, result,
					       TYPE_MODE (TREE_TYPE (from)),
					       GET_MODE_SIZE (to_mode));
		      if (!from_real || !from_imag)
			goto concat_store_slow;
		      emit_move_insn (XEXP (to_rtx, 0), from_real);
		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
		    }
		}
	    }
	  else
	    {
	    concat_store_slow:;
	      rtx temp = assign_stack_temp (to_mode,
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from, get_alias_set (to),
				    nontemporal, reversep);
	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
	    }
	}
      /* For calls to functions returning variable length structures, if TO_RTX
	 is not a MEM, go through a MEM because we must not create temporaries
	 of the VLA type.  */
      else if (!MEM_P (to_rtx)
	       && TREE_CODE (from) == CALL_EXPR
	       && COMPLETE_TYPE_P (TREE_TYPE (from))
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
	{
	  rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					GET_MODE_SIZE (GET_MODE (to_rtx)));
	  result = store_field (temp, bitsize, bitpos, bitregion_start,
				bitregion_end, mode1, from, get_alias_set (to),
				nontemporal, reversep);
	  emit_move_insn (to_rtx, temp);
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  gcc_checking_assert (known_ge (bitpos, 0));
	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from,
					       reversep))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);

      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
	      (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal, false);
  preserve_temp_slots (result);
  pop_temp_slots ();
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

static bool
emit_storent_insn (rtx to, rtx from)
{
  class expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Helper function for store_expr storing of STRING_CST.  */

static rtx
string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
{
  tree str = (tree) data;

  gcc_assert (offset >= 0);
  if (offset >= TREE_STRING_LENGTH (str))
    return const0_rtx;

  if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
      > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
    {
      char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
      size_t l = TREE_STRING_LENGTH (str) - offset;
      memcpy (p, TREE_STRING_POINTER (str) + offset, l);
      memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
      return c_readstr (p, mode, false);
    }

  return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more careful?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.  */
rtx
store_expr (tree exp, rtx target, int call_param_p,
	    bool nontemporal, bool reverse)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target,
			 call_param_p, nontemporal, reverse);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
		 profile_probability::uninitialized ());
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal, reverse);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal, reverse);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
      scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
      scalar_int_mode inner_mode = subreg_promoted_mode (target);

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (outer_mode)
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
					   TYPE_UNSIGNED (TREE_TYPE (exp))))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_SIGN (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_SIGN (target));
	  temp = convert_modes (inner_mode, outer_mode, temp,
				SUBREG_PROMOTED_SIGN (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      if (can_store_by_pieces (exp_len, string_cst_read_str, (void *) str,
			       MEM_ALIGN (target), false))
	{
	  store_by_pieces (target, exp_len, string_cst_read_str, (void *) str,
			   MEM_ALIGN (target), false, RETURN_BEGIN);
	  return NULL_RTX;
	}

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      if (str_copy_len >= exp_len)
	goto normal_expr;

      if (!can_store_by_pieces (str_copy_len, string_cst_read_str,
				(void *) str, MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = store_by_pieces (target, str_copy_len, string_cst_read_str,
				  (void *) str, MEM_ALIGN (target), false,
				  RETURN_END);
      clear_storage (adjust_address_1 (dest_mem, BLKmode, 0, 1, 1, 0,
				       exp_len - str_copy_len),
		     GEN_INT (exp_len - str_copy_len), BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

    normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
	 value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl, false);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    {
      if (GET_MODE_CLASS (GET_MODE (target))
	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
	  && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
		       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
	{
	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
				       TYPE_MODE (TREE_TYPE (exp)), 0);
	  if (t)
	    temp = t;
	}
      if (GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
    }
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
5742 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5744 if (GET_MODE (target
) == BLKmode
)
5746 /* Handle calls that return BLKmode values in registers. */
5747 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5748 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5750 store_bit_field (target
,
5751 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5752 0, 0, 0, GET_MODE (temp
), temp
, reverse
);
5755 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5758 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5760 /* Handle copying a string constant into an array. The string
5761 constant may be shorter than the array. So copy just the string's
5762 actual length, and clear the rest. First get the size of the data
5763 type of the string, which is actually the size of the target. */
5764 rtx size
= expr_size (exp
);
5766 if (CONST_INT_P (size
)
5767 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5768 emit_block_move (target
, temp
, size
,
5770 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
	      machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx_code_label *label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      poly_int64 const_copy_size;
	      if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
		{
		  size = plus_constant (address_mode, size, -const_copy_size);
		  target = adjust_address (target, BLKmode, const_copy_size);
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  if (reverse)
	    temp = flip_storage_order (GET_MODE (target), temp);
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT nelts;
	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
	  return nelts;
	else
	  return -1;
      }

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case ERROR_MARK:
      return 0;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
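/* As a worked example: for "struct { int a; int b[3]; }" the function
   returns 2 when FOR_CTOR_P (a constructor needs two top-level
   elements, one for A and one for B) and 4 otherwise (the scalar
   estimate: A plus the three elements of B).  */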
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_unique_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  unique_nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
							   &ic, p_complete);

	    nz_elts += mult * nz;
	    unique_nz_elts += unz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  unique_nz_elts += TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    /* We can only construct constant-length vectors using
	       CONSTRUCTOR.  */
	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
	    for (unsigned int i = 0; i < nunits; ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  {
		    nz_elts += mult;
		    unique_nz_elts++;
		  }
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    unique_nz_elts += tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_unique_nz_elts += unique_nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
     high - low + 1 (this can be useful for callers to determine ctors
     that could be cheaply initialized with - perhaps nested - loops
     compared to copied from huge read-only data),
     and place it in *P_UNIQUE_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_unique_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_unique_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
				     p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
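/* E.g. a union initialized with a single element is complete at this
   level only if that element's type spans the whole union, tail
   padding included; otherwise some bytes were never explicitly given
   a value.  */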
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
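/* E.g. an 8-element constructor with one nonzero entry yields
   nz_elts == 1 and init_elts == 8, and 1 < 8 / 4 classifies it as
   mostly zeros, so callers prefer clearing the whole block and then
   storing the single nonzero element.  */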
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
			 poly_uint64 bitregion_start,
			 poly_uint64 bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
      && maybe_ne (bitsize, 0U)
      && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (known_eq (bitpos, 0) || MEM_P (target)))
    {
      if (MEM_P (target))
	{
	  machine_mode target_mode = GET_MODE (target);
	  if (target_mode != BLKmode
	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
	    target_mode = BLKmode;
	  target = adjust_address (target, target_mode, bytepos);
	}

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bytesize, reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
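/* E.g. a nested CONSTRUCTOR stored at bit offset 24 with a bit size of
   64 takes the recursive shortcut, since both are byte multiples,
   whereas one packed at bit offset 3 falls through to store_field's
   bitfield path.  */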
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6311 switch (TREE_CODE (type
))
6315 case QUAL_UNION_TYPE
:
6317 unsigned HOST_WIDE_INT idx
;
6320 /* The storage order is specified for every aggregate type. */
6321 reverse
= TYPE_REVERSE_STORAGE_ORDER (type
);
6323 /* If size is zero or the target is already cleared, do nothing. */
6324 if (known_eq (size
, 0) || cleared
)
6326 /* We either clear the aggregate or indicate the value is dead. */
6327 else if ((TREE_CODE (type
) == UNION_TYPE
6328 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
6329 && ! CONSTRUCTOR_ELTS (exp
))
6330 /* If the constructor is empty, clear the union. */
6332 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
6336 /* If we are building a static constructor into a register,
6337 set the initial value as zero so we can fold the value into
6338 a constant. But if more than one register is involved,
6339 this probably loses. */
6340 else if (REG_P (target
) && TREE_STATIC (exp
)
6341 && known_le (GET_MODE_SIZE (GET_MODE (target
)),
6342 REGMODE_NATURAL_SIZE (GET_MODE (target
))))
6344 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6348 /* If the constructor has fewer fields than the structure or
6349 if we are initializing the structure to mostly zeros, clear
6350 the whole structure first. Don't do this if TARGET is a
6351 register whose mode size isn't equal to SIZE since
6352 clear_storage can't handle this case. */
6353 else if (known_size_p (size
)
6354 && (((int) CONSTRUCTOR_NELTS (exp
) != fields_length (type
))
6355 || mostly_zeros_p (exp
))
6357 || known_eq (GET_MODE_SIZE (GET_MODE (target
)), size
)))
6359 clear_storage (target
, gen_int_mode (size
, Pmode
),
6364 if (REG_P (target
) && !cleared
)
6365 emit_clobber (target
);
6367 /* Store each element of the constructor into the
6368 corresponding field of TARGET. */
6369 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
6372 HOST_WIDE_INT bitsize
;
6373 HOST_WIDE_INT bitpos
= 0;
6375 rtx to_rtx
= target
;
6377 /* Just ignore missing fields. We cleared the whole
6378 structure, above, if any fields are missing. */
6382 if (cleared
&& initializer_zerop (value
))
6385 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
6386 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
6390 mode
= DECL_MODE (field
);
6391 if (DECL_BIT_FIELD (field
))
6394 offset
= DECL_FIELD_OFFSET (field
);
6395 if (tree_fits_shwi_p (offset
)
6396 && tree_fits_shwi_p (bit_position (field
)))
6398 bitpos
= int_bit_position (field
);
6404 /* If this initializes a field that is smaller than a
6405 word, at the start of a word, try to widen it to a full
6406 word. This special case allows us to output C++ member
6407 function initializations in a form that the optimizers
6409 if (WORD_REGISTER_OPERATIONS
6411 && bitsize
< BITS_PER_WORD
6412 && bitpos
% BITS_PER_WORD
== 0
6413 && GET_MODE_CLASS (mode
) == MODE_INT
6414 && TREE_CODE (value
) == INTEGER_CST
6416 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6418 tree type
= TREE_TYPE (value
);
6420 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6422 type
= lang_hooks
.types
.type_for_mode
6423 (word_mode
, TYPE_UNSIGNED (type
));
6424 value
= fold_convert (type
, value
);
6425 /* Make sure the bits beyond the original bitsize are zero
6426 so that we can correctly avoid extra zeroing stores in
6427 later constructor elements. */
6429 = wide_int_to_tree (type
, wi::mask (bitsize
, false,
6431 value
= fold_build2 (BIT_AND_EXPR
, type
, value
, bitsize_mask
);
6434 if (BYTES_BIG_ENDIAN
)
6436 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6437 build_int_cst (type
,
6438 BITS_PER_WORD
- bitsize
));
6439 bitsize
= BITS_PER_WORD
;
6443 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6444 && DECL_NONADDRESSABLE_P (field
))
6446 to_rtx
= copy_rtx (to_rtx
);
6447 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6450 store_constructor_field (to_rtx
, bitsize
, bitpos
,
6451 0, bitregion_end
, mode
,
6453 get_alias_set (TREE_TYPE (field
)),
6461 unsigned HOST_WIDE_INT i
;
6464 tree elttype
= TREE_TYPE (type
);
6466 HOST_WIDE_INT minelt
= 0;
6467 HOST_WIDE_INT maxelt
= 0;
6469 /* The storage order is specified for every aggregate type. */
6470 reverse
= TYPE_REVERSE_STORAGE_ORDER (type
);
6472 domain
= TYPE_DOMAIN (type
);
6473 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6474 && TYPE_MAX_VALUE (domain
)
6475 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6476 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6478 /* If we have constant bounds for the range of the type, get them. */
6481 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6482 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6485 /* If the constructor has fewer elements than the array, clear
6486 the whole array first. Similarly if this is static
6487 constructor of a non-BLKmode object. */
6490 else if (REG_P (target
) && TREE_STATIC (exp
))
6494 unsigned HOST_WIDE_INT idx
;
6496 HOST_WIDE_INT count
= 0, zero_count
= 0;
6497 need_to_clear
= ! const_bounds_p
;
6499 /* This loop is a more accurate version of the loop in
6500 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6501 is also needed to check for missing elements. */
6502 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6504 HOST_WIDE_INT this_node_count
;
6509 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6511 tree lo_index
= TREE_OPERAND (index
, 0);
6512 tree hi_index
= TREE_OPERAND (index
, 1);
6514 if (! tree_fits_uhwi_p (lo_index
)
6515 || ! tree_fits_uhwi_p (hi_index
))
6521 this_node_count
= (tree_to_uhwi (hi_index
)
6522 - tree_to_uhwi (lo_index
) + 1);
6525 this_node_count
= 1;
6527 count
+= this_node_count
;
6528 if (mostly_zeros_p (value
))
6529 zero_count
+= this_node_count
;
6532 /* Clear the entire array first if there are any missing
6533 elements, or if the incidence of zero elements is >=
6536 && (count
< maxelt
- minelt
+ 1
6537 || 4 * zero_count
>= 3 * count
))
6541 if (need_to_clear
&& maybe_gt (size
, 0))
6544 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6546 clear_storage (target
, gen_int_mode (size
, Pmode
),
6551 if (!cleared
&& REG_P (target
))
6552 /* Inform later passes that the old value is dead. */
6553 emit_clobber (target
);
6555 /* Store each element of the constructor into the
6556 corresponding element of TARGET, determined by counting the
6558 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
6562 HOST_WIDE_INT bitpos
;
6563 rtx xtarget
= target
;
6565 if (cleared
&& initializer_zerop (value
))
6568 mode
= TYPE_MODE (elttype
);
6569 if (mode
!= BLKmode
)
6570 bitsize
= GET_MODE_BITSIZE (mode
);
6571 else if (!poly_int_tree_p (TYPE_SIZE (elttype
), &bitsize
))
6574 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6576 tree lo_index
= TREE_OPERAND (index
, 0);
6577 tree hi_index
= TREE_OPERAND (index
, 1);
6578 rtx index_r
, pos_rtx
;
6579 HOST_WIDE_INT lo
, hi
, count
;
6582 /* If the range is constant and "small", unroll the loop. */
6584 && tree_fits_shwi_p (lo_index
)
6585 && tree_fits_shwi_p (hi_index
)
6586 && (lo
= tree_to_shwi (lo_index
),
6587 hi
= tree_to_shwi (hi_index
),
6588 count
= hi
- lo
+ 1,
6591 || (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6592 && (tree_to_uhwi (TYPE_SIZE (elttype
)) * count
6595 lo
-= minelt
; hi
-= minelt
;
6596 for (; lo
<= hi
; lo
++)
6598 bitpos
= lo
* tree_to_shwi (TYPE_SIZE (elttype
));
6601 && !MEM_KEEP_ALIAS_SET_P (target
)
6602 && TREE_CODE (type
) == ARRAY_TYPE
6603 && TYPE_NONALIASED_COMPONENT (type
))
6605 target
= copy_rtx (target
);
6606 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6609 store_constructor_field
6610 (target
, bitsize
, bitpos
, 0, bitregion_end
,
6611 mode
, value
, cleared
,
6612 get_alias_set (elttype
), reverse
);
6617 rtx_code_label
*loop_start
= gen_label_rtx ();
6618 rtx_code_label
*loop_end
= gen_label_rtx ();
6621 expand_normal (hi_index
);
6623 index
= build_decl (EXPR_LOCATION (exp
),
6624 VAR_DECL
, NULL_TREE
, domain
);
6625 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6626 SET_DECL_RTL (index
, index_r
);
6627 store_expr (lo_index
, index_r
, 0, false, reverse
);
6629 /* Build the head of the loop. */
6630 do_pending_stack_adjust ();
6631 emit_label (loop_start
);
6633 /* Assign value to element index. */
6635 fold_convert (ssizetype
,
6636 fold_build2 (MINUS_EXPR
,
6639 TYPE_MIN_VALUE (domain
)));
6642 size_binop (MULT_EXPR
, position
,
6643 fold_convert (ssizetype
,
6644 TYPE_SIZE_UNIT (elttype
)));
6646 pos_rtx
= expand_normal (position
);
6647 xtarget
= offset_address (target
, pos_rtx
,
6648 highest_pow2_factor (position
));
6649 xtarget
= adjust_address (xtarget
, mode
, 0);
6650 if (TREE_CODE (value
) == CONSTRUCTOR
)
6651 store_constructor (value
, xtarget
, cleared
,
6652 exact_div (bitsize
, BITS_PER_UNIT
),
6655 store_expr (value
, xtarget
, 0, false, reverse
);
6657 /* Generate a conditional jump to exit the loop. */
6658 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6660 jumpif (exit_cond
, loop_end
,
6661 profile_probability::uninitialized ());
6663 /* Update the loop counter, and jump to the head of
6665 expand_assignment (index
,
6666 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6667 index
, integer_one_node
),
6670 emit_jump (loop_start
);
6672 /* Build the end of the loop. */
6673 emit_label (loop_end
);
6676 else if ((index
!= 0 && ! tree_fits_shwi_p (index
))
6677 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype
)))
6682 index
= ssize_int (1);
6685 index
= fold_convert (ssizetype
,
6686 fold_build2 (MINUS_EXPR
,
6689 TYPE_MIN_VALUE (domain
)));
6692 size_binop (MULT_EXPR
, index
,
6693 fold_convert (ssizetype
,
6694 TYPE_SIZE_UNIT (elttype
)));
6695 xtarget
= offset_address (target
,
6696 expand_normal (position
),
6697 highest_pow2_factor (position
));
6698 xtarget
= adjust_address (xtarget
, mode
, 0);
6699 store_expr (value
, xtarget
, 0, false, reverse
);
6704 bitpos
= ((tree_to_shwi (index
) - minelt
)
6705 * tree_to_uhwi (TYPE_SIZE (elttype
)));
6707 bitpos
= (i
* tree_to_uhwi (TYPE_SIZE (elttype
)));
6709 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6710 && TREE_CODE (type
) == ARRAY_TYPE
6711 && TYPE_NONALIASED_COMPONENT (type
))
6713 target
= copy_rtx (target
);
6714 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6716 store_constructor_field (target
, bitsize
, bitpos
, 0,
6717 bitregion_end
, mode
, value
,
6718 cleared
, get_alias_set (elttype
),
6727 unsigned HOST_WIDE_INT idx
;
6728 constructor_elt
*ce
;
6731 insn_code icode
= CODE_FOR_nothing
;
6733 tree elttype
= TREE_TYPE (type
);
6734 int elt_size
= tree_to_uhwi (TYPE_SIZE (elttype
));
6735 machine_mode eltmode
= TYPE_MODE (elttype
);
6736 HOST_WIDE_INT bitsize
;
6737 HOST_WIDE_INT bitpos
;
6738 rtvec vector
= NULL
;
6740 unsigned HOST_WIDE_INT const_n_elts
;
6741 alias_set_type alias
;
6742 bool vec_vec_init_p
= false;
6743 machine_mode mode
= GET_MODE (target
);
6745 gcc_assert (eltmode
!= BLKmode
);
	/* Try using vec_duplicate_optab for uniform vectors.  */
	if (!TREE_SIDE_EFFECTS (exp)
	    && VECTOR_MODE_P (mode)
	    && eltmode == GET_MODE_INNER (mode)
	    && ((icode = optab_handler (vec_duplicate_optab, mode))
		!= CODE_FOR_nothing)
	    && (elt = uniform_vector_p (exp)))
	  {
	    class expand_operand ops[2];
	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
	    expand_insn (icode, 2, ops);
	    if (!rtx_equal_p (target, ops[0].value))
	      emit_move_insn (target, ops[0].value);
	    break;
	  }
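	/* In effect, a uniform constructor such as {x, x, x, x} becomes a
	   single broadcast of X when the target provides a vec_duplicate
	   pattern for this vector mode, instead of one store per element.  */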
6764 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6766 && VECTOR_MODE_P (mode
)
6767 && n_elts
.is_constant (&const_n_elts
))
6769 machine_mode emode
= eltmode
;
6771 if (CONSTRUCTOR_NELTS (exp
)
6772 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp
, 0)->value
))
6775 tree etype
= TREE_TYPE (CONSTRUCTOR_ELT (exp
, 0)->value
);
6776 gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp
)
6777 * TYPE_VECTOR_SUBPARTS (etype
),
6779 emode
= TYPE_MODE (etype
);
6781 icode
= convert_optab_handler (vec_init_optab
, mode
, emode
);
6782 if (icode
!= CODE_FOR_nothing
)
6784 unsigned int i
, n
= const_n_elts
;
6786 if (emode
!= eltmode
)
6788 n
= CONSTRUCTOR_NELTS (exp
);
6789 vec_vec_init_p
= true;
6791 vector
= rtvec_alloc (n
);
6792 for (i
= 0; i
< n
; i
++)
6793 RTVEC_ELT (vector
, i
) = CONST0_RTX (emode
);
6797 /* If the constructor has fewer elements than the vector,
6798 clear the whole array first. Similarly if this is static
6799 constructor of a non-BLKmode object. */
6802 else if (REG_P (target
) && TREE_STATIC (exp
))
6806 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6809 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6811 tree sz
= TYPE_SIZE (TREE_TYPE (value
));
6813 = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR
, sz
,
6814 TYPE_SIZE (elttype
)));
6816 count
+= n_elts_here
;
6817 if (mostly_zeros_p (value
))
6818 zero_count
+= n_elts_here
;
6821 /* Clear the entire vector first if there are any missing elements,
6822 or if the incidence of zero elements is >= 75%. */
6823 need_to_clear
= (maybe_lt (count
, n_elts
)
6824 || 4 * zero_count
>= 3 * count
);
6827 if (need_to_clear
&& maybe_gt (size
, 0) && !vector
)
6830 emit_move_insn (target
, CONST0_RTX (mode
));
6832 clear_storage (target
, gen_int_mode (size
, Pmode
),
6837 /* Inform later passes that the old value is dead. */
6838 if (!cleared
&& !vector
&& REG_P (target
))
6839 emit_move_insn (target
, CONST0_RTX (mode
));
6842 alias
= MEM_ALIAS_SET (target
);
6844 alias
= get_alias_set (elttype
);
6846 /* Store each element of the constructor into the corresponding
6847 element of TARGET, determined by counting the elements. */
6848 for (idx
= 0, i
= 0;
6849 vec_safe_iterate (CONSTRUCTOR_ELTS (exp
), idx
, &ce
);
6850 idx
++, i
+= bitsize
/ elt_size
)
6852 HOST_WIDE_INT eltpos
;
6853 tree value
= ce
->value
;
6855 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value
)));
6856 if (cleared
&& initializer_zerop (value
))
6860 eltpos
= tree_to_uhwi (ce
->index
);
6868 gcc_assert (ce
->index
== NULL_TREE
);
6869 gcc_assert (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
);
6873 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
6874 RTVEC_ELT (vector
, eltpos
) = expand_normal (value
);
6878 machine_mode value_mode
6879 = (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
6880 ? TYPE_MODE (TREE_TYPE (value
)) : eltmode
);
6881 bitpos
= eltpos
* elt_size
;
6882 store_constructor_field (target
, bitsize
, bitpos
, 0,
6883 bitregion_end
, value_mode
,
6884 value
, cleared
, alias
, reverse
);
6889 emit_insn (GEN_FCN (icode
) (target
,
6890 gen_rtx_PARALLEL (mode
, vector
)));
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
6926 if (TREE_CODE (exp
) == ERROR_MARK
)
6929 /* If we have nothing to store, do nothing unless the expression has
6930 side-effects. Don't do that for zero sized addressable lhs of
6932 if (known_eq (bitsize
, 0)
6933 && (!TREE_ADDRESSABLE (TREE_TYPE (exp
))
6934 || TREE_CODE (exp
) != CALL_EXPR
))
6935 return expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6937 if (GET_CODE (target
) == CONCAT
)
6939 /* We're storing into a struct containing a single __complex. */
6941 gcc_assert (known_eq (bitpos
, 0));
6942 return store_expr (exp
, target
, 0, nontemporal
, reverse
);
6945 /* If the structure is in a register or if the component
6946 is a bit field, we cannot use addressing to access it.
6947 Use bit-field techniques or SUBREG to store in it. */
6949 poly_int64 decl_bitsize
;
6950 if (mode
== VOIDmode
6951 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6952 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6953 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6955 || GET_CODE (target
) == SUBREG
6956 /* If the field isn't aligned enough to store as an ordinary memref,
6957 store it as a bit field. */
6959 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6960 || !multiple_p (bitpos
, GET_MODE_ALIGNMENT (mode
)))
6961 && targetm
.slow_unaligned_access (mode
, MEM_ALIGN (target
)))
6962 || !multiple_p (bitpos
, BITS_PER_UNIT
)))
6963 || (known_size_p (bitsize
)
6965 && maybe_gt (GET_MODE_BITSIZE (mode
), bitsize
))
6966 /* If the RHS and field are a constant size and the size of the
6967 RHS isn't the same size as the bitfield, we must use bitfield
6969 || (known_size_p (bitsize
)
6970 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp
)))
6971 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp
))),
6973 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6974 we will handle specially below. */
6975 && !(TREE_CODE (exp
) == CONSTRUCTOR
6976 && multiple_p (bitsize
, BITS_PER_UNIT
))
6977 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6978 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6979 includes some extra padding. store_expr / expand_expr will in
6980 that case call get_inner_reference that will have the bitsize
6981 we check here and thus the block move will not clobber the
6982 padding that shouldn't be clobbered. In the future we could
6983 replace the TREE_ADDRESSABLE check with a check that
6984 get_base_address needs to live in memory. */
6985 && (!TREE_ADDRESSABLE (TREE_TYPE (exp
))
6986 || TREE_CODE (exp
) != COMPONENT_REF
6987 || !multiple_p (bitsize
, BITS_PER_UNIT
)
6988 || !multiple_p (bitpos
, BITS_PER_UNIT
)
6989 || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp
, 1)),
6991 || maybe_ne (decl_bitsize
, bitsize
)))
6992 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6993 decl we must use bitfield operations. */
6994 || (known_size_p (bitsize
)
6995 && TREE_CODE (exp
) == MEM_REF
6996 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6997 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6998 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6999 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
7004 /* If EXP is a NOP_EXPR of precision less than its mode, then that
7005 implies a mask operation. If the precision is the same size as
7006 the field we're storing into, that mask is redundant. This is
7007 particularly common with bit field assignments generated by the
7009 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
7012 tree type
= TREE_TYPE (exp
);
7013 if (INTEGRAL_TYPE_P (type
)
7014 && maybe_ne (TYPE_PRECISION (type
),
7015 GET_MODE_BITSIZE (TYPE_MODE (type
)))
7016 && known_eq (bitsize
, TYPE_PRECISION (type
)))
7018 tree op
= gimple_assign_rhs1 (nop_def
);
7019 type
= TREE_TYPE (op
);
7020 if (INTEGRAL_TYPE_P (type
)
7021 && known_ge (TYPE_PRECISION (type
), bitsize
))
7026 temp
= expand_normal (exp
);
7028 /* We don't support variable-sized BLKmode bitfields, since our
7029 handling of BLKmode is bound up with the ability to break
7030 things into words. */
7031 gcc_assert (mode
!= BLKmode
|| bitsize
.is_constant ());
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  machine_mode temp_mode = GET_MODE (temp);
	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}
7054 /* If the value has aggregate type and an integral mode then, if BITSIZE
7055 is narrower than this mode and this is for big-endian data, we first
7056 need to put the value into the low-order bits for store_bit_field,
7057 except when MODE is BLKmode and BITSIZE larger than the word size
7058 (see the handling of fields larger than a word in store_bit_field).
7059 Moreover, the field may be not aligned on a byte boundary; in this
7060 case, if it has reverse storage order, it needs to be accessed as a
7061 scalar field with reverse storage order and we must first put the
7062 value into target order. */
7063 scalar_int_mode temp_mode
;
7064 if (AGGREGATE_TYPE_P (TREE_TYPE (exp
))
7065 && is_int_mode (GET_MODE (temp
), &temp_mode
))
7067 HOST_WIDE_INT size
= GET_MODE_BITSIZE (temp_mode
);
7069 reverse
= TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp
));
7072 temp
= flip_storage_order (temp_mode
, temp
);
7074 gcc_checking_assert (known_le (bitsize
, size
));
7075 if (maybe_lt (bitsize
, size
)
7076 && reverse
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
7077 /* Use of to_constant for BLKmode was checked above. */
7078 && !(mode
== BLKmode
&& bitsize
.to_constant () > BITS_PER_WORD
))
7079 temp
= expand_shift (RSHIFT_EXPR
, temp_mode
, temp
,
7080 size
- bitsize
, NULL_RTX
, 1);
7083 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
7084 if (mode
!= VOIDmode
&& mode
!= BLKmode
7085 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
7086 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
7088 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7089 and BITPOS must be aligned on a byte boundary. If so, we simply do
7090 a block copy. Likewise for a BLKmode-like TARGET. */
7091 if (GET_MODE (temp
) == BLKmode
7092 && (GET_MODE (target
) == BLKmode
7094 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
7095 && multiple_p (bitpos
, BITS_PER_UNIT
)
7096 && multiple_p (bitsize
, BITS_PER_UNIT
))))
7098 gcc_assert (MEM_P (target
) && MEM_P (temp
));
7099 poly_int64 bytepos
= exact_div (bitpos
, BITS_PER_UNIT
);
7100 poly_int64 bytesize
= bits_to_bytes_round_up (bitsize
);
7102 target
= adjust_address (target
, VOIDmode
, bytepos
);
7103 emit_block_move (target
, temp
,
7104 gen_int_mode (bytesize
, Pmode
),
7110 /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
7111 word size, we need to load the value (see again store_bit_field). */
7112 if (GET_MODE (temp
) == BLKmode
&& known_le (bitsize
, BITS_PER_WORD
))
7114 scalar_int_mode temp_mode
= smallest_int_mode_for_size (bitsize
);
7115 temp
= extract_bit_field (temp
, bitsize
, 0, 1, NULL_RTX
, temp_mode
,
7116 temp_mode
, false, NULL
);
      /* Store the value in the bitfield.  */
      gcc_checking_assert (known_ge (bitpos, 0));
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode,
				   exact_div (bitpos, BITS_PER_UNIT));

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
	{
	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
	  return to_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, poly_int64_pod *pbitsize,
		     poly_int64_pod *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
7181 machine_mode mode
= VOIDmode
;
7182 bool blkmode_bitfield
= false;
7183 tree offset
= size_zero_node
;
7184 poly_offset_int bit_offset
= 0;
7186 /* First get the mode, signedness, storage order and size. We do this from
7187 just the outermost expression. */
7189 if (TREE_CODE (exp
) == COMPONENT_REF
)
7191 tree field
= TREE_OPERAND (exp
, 1);
7192 size_tree
= DECL_SIZE (field
);
7193 if (flag_strict_volatile_bitfields
> 0
7194 && TREE_THIS_VOLATILE (exp
)
7195 && DECL_BIT_FIELD_TYPE (field
)
7196 && DECL_MODE (field
) != BLKmode
)
7197 /* Volatile bitfields should be accessed in the mode of the
7198 field's type, not the mode computed based on the bit
7200 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
7201 else if (!DECL_BIT_FIELD (field
))
7203 mode
= DECL_MODE (field
);
7204 /* For vector fields re-check the target flags, as DECL_MODE
7205 could have been set with different target flags than
7206 the current function has. */
7208 && VECTOR_TYPE_P (TREE_TYPE (field
))
7209 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field
))))
7210 mode
= TYPE_MODE (TREE_TYPE (field
));
7212 else if (DECL_MODE (field
) == BLKmode
)
7213 blkmode_bitfield
= true;
7215 *punsignedp
= DECL_UNSIGNED (field
);
7217 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
7219 size_tree
= TREE_OPERAND (exp
, 1);
7220 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
7221 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
7223 /* For vector types, with the correct size of access, use the mode of
7225 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
7226 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
7227 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
7228 mode
= TYPE_MODE (TREE_TYPE (exp
));
7232 mode
= TYPE_MODE (TREE_TYPE (exp
));
7233 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
7235 if (mode
== BLKmode
)
7236 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
7238 *pbitsize
= GET_MODE_BITSIZE (mode
);
7243 if (! tree_fits_uhwi_p (size_tree
))
7244 mode
= BLKmode
, *pbitsize
= -1;
7246 *pbitsize
= tree_to_uhwi (size_tree
);
7249 *preversep
= reverse_storage_order_for_component_p (exp
);
7251 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7252 and find the ultimate containing object. */
7255 switch (TREE_CODE (exp
))
7258 bit_offset
+= wi::to_poly_offset (TREE_OPERAND (exp
, 2));
7263 tree field
= TREE_OPERAND (exp
, 1);
7264 tree this_offset
= component_ref_field_offset (exp
);
7266 /* If this field hasn't been filled in yet, don't go past it.
7267 This should only happen when folding expressions made during
7268 type construction. */
7269 if (this_offset
== 0)
7272 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
7273 bit_offset
+= wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field
));
7275 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7280 case ARRAY_RANGE_REF
:
7282 tree index
= TREE_OPERAND (exp
, 1);
7283 tree low_bound
= array_ref_low_bound (exp
);
7284 tree unit_size
= array_ref_element_size (exp
);
7286 /* We assume all arrays have sizes that are a multiple of a byte.
7287 First subtract the lower bound, if any, in the type of the
7288 index, then convert to sizetype and multiply by the size of
7289 the array element. */
7290 if (! integer_zerop (low_bound
))
7291 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
7294 offset
= size_binop (PLUS_EXPR
, offset
,
7295 size_binop (MULT_EXPR
,
7296 fold_convert (sizetype
, index
),
7305 bit_offset
+= *pbitsize
;
7308 case VIEW_CONVERT_EXPR
:
7312 /* Hand back the decl for MEM[&decl, off]. */
7313 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
7315 tree off
= TREE_OPERAND (exp
, 1);
7316 if (!integer_zerop (off
))
7318 poly_offset_int boff
= mem_ref_offset (exp
);
7319 boff
<<= LOG2_BITS_PER_UNIT
;
7322 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7330 /* If any reference in the chain is volatile, the effect is volatile. */
7331 if (TREE_THIS_VOLATILE (exp
))
7334 exp
= TREE_OPERAND (exp
, 0);
7338 /* If OFFSET is constant, see if we can return the whole thing as a
7339 constant bit position. Make sure to handle overflow during
7341 if (poly_int_tree_p (offset
))
7343 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset
),
7344 TYPE_PRECISION (sizetype
));
7345 tem
<<= LOG2_BITS_PER_UNIT
;
7347 if (tem
.to_shwi (pbitpos
))
7348 *poffset
= offset
= NULL_TREE
;
7351 /* Otherwise, split it up. */
7354 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7355 if (!bit_offset
.to_shwi (pbitpos
) || maybe_lt (*pbitpos
, 0))
7357 *pbitpos
= num_trailing_bits (bit_offset
.force_shwi ());
7358 poly_offset_int bytes
= bits_to_bytes_round_down (bit_offset
);
7359 offset
= size_binop (PLUS_EXPR
, offset
,
7360 build_int_cst (sizetype
, bytes
.force_shwi ()));
7366 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7367 if (mode
== VOIDmode
7369 && multiple_p (*pbitpos
, BITS_PER_UNIT
)
7370 && multiple_p (*pbitsize
, BITS_PER_UNIT
))
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
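/* E.g. for "a.b.c" the result is the minimum DECL_ALIGN along the chain
   of COMPONENT_REFs, while a BIT_FIELD_REF pessimizes the answer to a
   single bit, since nothing useful can be assumed about its position.  */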
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);
7432 /* Check for subreg applied to an expression produced by loop optimizer. */
7434 && !REG_P (SUBREG_REG (value
))
7435 && !MEM_P (SUBREG_REG (value
)))
7438 = simplify_gen_subreg (GET_MODE (value
),
7439 force_reg (GET_MODE (SUBREG_REG (value
)),
7440 force_operand (SUBREG_REG (value
),
7442 GET_MODE (SUBREG_REG (value
)),
7443 SUBREG_BYTE (value
));
7444 code
= GET_CODE (value
);
7447 /* Check for a PIC address load. */
7448 if ((code
== PLUS
|| code
== MINUS
)
7449 && XEXP (value
, 0) == pic_offset_table_rtx
7450 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
7451 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
7452 || GET_CODE (XEXP (value
, 1)) == CONST
))
7455 subtarget
= gen_reg_rtx (GET_MODE (value
));
7456 emit_move_insn (subtarget
, value
);
7460 if (ARITHMETIC_P (value
))
7462 op2
= XEXP (value
, 1);
7463 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
7465 if (code
== MINUS
&& CONST_INT_P (op2
))
7468 op2
= negate_rtx (GET_MODE (value
), op2
);
7471 /* Check for an addition with OP2 a constant integer and our first
7472 operand a PLUS of a virtual register and something else. In that
7473 case, we want to emit the sum of the virtual register and the
7474 constant first and then add the other value. This allows virtual
7475 register instantiation to simply modify the constant rather than
7476 creating another one around this addition. */
7477 if (code
== PLUS
&& CONST_INT_P (op2
)
7478 && GET_CODE (XEXP (value
, 0)) == PLUS
7479 && REG_P (XEXP (XEXP (value
, 0), 0))
7480 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7481 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
7483 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
7484 XEXP (XEXP (value
, 0), 0), op2
,
7485 subtarget
, 0, OPTAB_LIB_WIDEN
);
7486 return expand_simple_binop (GET_MODE (value
), code
, temp
,
7487 force_operand (XEXP (XEXP (value
,
7489 target
, 0, OPTAB_LIB_WIDEN
);
7492 op1
= force_operand (XEXP (value
, 0), subtarget
);
7493 op2
= force_operand (op2
, NULL_RTX
);
7497 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7499 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7500 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7501 target
, 1, OPTAB_LIB_WIDEN
);
7503 return expand_divmod (0,
7504 FLOAT_MODE_P (GET_MODE (value
))
7505 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7506 GET_MODE (value
), op1
, op2
, target
, 0);
7508 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7511 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7514 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7517 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7518 target
, 0, OPTAB_LIB_WIDEN
);
7520 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7521 target
, 1, OPTAB_LIB_WIDEN
);
7524 if (UNARY_P (value
))
7527 target
= gen_reg_rtx (GET_MODE (value
));
7528 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7535 case FLOAT_TRUNCATE
:
7536 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7541 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7545 case UNSIGNED_FLOAT
:
7546 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7550 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7554 #ifdef INSN_SCHEDULING
7555 /* On machines that have insn scheduling, we want all memory reference to be
7556 explicit, so we need to deal with such paradoxical SUBREGs. */
7557 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7559 = simplify_gen_subreg (GET_MODE (value
),
7560 force_reg (GET_MODE (SUBREG_REG (value
)),
7561 force_operand (SUBREG_REG (value
),
7563 GET_MODE (SUBREG_REG (value
)),
7564 SUBREG_BYTE (value
));
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
7585 /* If EXP has varying size, we MUST use a target since we currently
7586 have no way of allocating temporaries of variable size
7587 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7588 So we assume here that something at a higher level has prevented a
7589 clash. This is somewhat bogus, but the best we can do. Only
7590 do this when X is BLKmode and when we are at the top level. */
7591 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7592 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7593 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7594 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7595 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7597 && GET_MODE (x
) == BLKmode
)
7598 /* If X is in the outgoing argument area, it is always safe. */
7600 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7601 || (GET_CODE (XEXP (x
, 0)) == PLUS
7602 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7605 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7606 find the underlying pseudo. */
7607 if (GET_CODE (x
) == SUBREG
)
7610 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7614 /* Now look at our tree code and possibly recurse. */
7615 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7617 case tcc_declaration
:
7618 exp_rtl
= DECL_RTL_IF_SET (exp
);
7624 case tcc_exceptional
:
7625 if (TREE_CODE (exp
) == TREE_LIST
)
7629 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7631 exp
= TREE_CHAIN (exp
);
7634 if (TREE_CODE (exp
) != TREE_LIST
)
7635 return safe_from_p (x
, exp
, 0);
7638 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7640 constructor_elt
*ce
;
7641 unsigned HOST_WIDE_INT idx
;
7643 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7644 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7645 || !safe_from_p (x
, ce
->value
, 0))
7649 else if (TREE_CODE (exp
) == ERROR_MARK
)
7650 return 1; /* An already-visited SAVE_EXPR? */
7655 /* The only case we look at here is the DECL_INITIAL inside a
7657 return (TREE_CODE (exp
) != DECL_EXPR
7658 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7659 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7660 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7663 case tcc_comparison
:
7664 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7669 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7671 case tcc_expression
:
7674 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7675 the expression. If it is set, we conflict iff we are that rtx or
7676 both are in memory. Otherwise, we check all operands of the
7677 expression recursively. */
7679 switch (TREE_CODE (exp
))
7682 /* If the operand is static or we are static, we can't conflict.
7683 Likewise if we don't conflict with the operand at all. */
7684 if (staticp (TREE_OPERAND (exp
, 0))
7685 || TREE_STATIC (exp
)
7686 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
7689 /* Otherwise, the only way this can conflict is if we are taking
7690 the address of a DECL a that address if part of X, which is
7692 exp
= TREE_OPERAND (exp
, 0);
7695 if (!DECL_RTL_SET_P (exp
)
7696 || !MEM_P (DECL_RTL (exp
)))
7699 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7705 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7706 get_alias_set (exp
)))
7711 /* Assume that the call will clobber all hard registers and
7713 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7718 case WITH_CLEANUP_EXPR
:
7719 case CLEANUP_POINT_EXPR
:
7720 /* Lowered by gimplify.c. */
7724 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7730 /* If we have an rtx, we do not need to scan our operands. */
7734 nops
= TREE_OPERAND_LENGTH (exp
);
7735 for (i
= 0; i
< nops
; i
++)
7736 if (TREE_OPERAND (exp
, i
) != 0
7737 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7743 /* Should never get a type here. */
7747 /* If we have an rtl, find any enclosed object. Then see if we conflict
7751 if (GET_CODE (exp_rtl
) == SUBREG
)
7753 exp_rtl
= SUBREG_REG (exp_rtl
);
7755 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7759 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7760 are memory and they conflict. */
7761 return ! (rtx_equal_p (x
, exp_rtl
)
7762 || (MEM_P (x
) && MEM_P (exp_rtl
)
7763 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7766 /* If we reach here, it is safe. */
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
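/* The same idea for a plain host integer, as a hypothetical standalone
   sketch (not used anywhere in this file): the lowest set bit is the
   highest power-of-two factor, so for X == 24 the result is 8.

     static unsigned HOST_WIDE_INT
     highest_pow2_factor_of_hwi (unsigned HOST_WIDE_INT x)
     {
       return x ? (x & -x) : 0;
     }
*/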
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
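/* E.g. LT_EXPR maps to LTU when UNSIGNEDP is set, since an unsigned "<"
   must become an unsigned RTL comparison, while the unordered variants
   (UNLT_EXPR and friends) do not depend on signedness.  */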
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
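/* When both operands are the same tree, e.g. in "x * x", the tree is
   expanded only once and the rtx is duplicated with copy_rtx, keeping
   the two uses trivially identical for later passes.  */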
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
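/* On targets with section anchors enabled, the address inside the MEM
   may be rewritten here as an anchor-plus-offset form; the bare symbol
   is kept only for EXPAND_INITIALIZER, where a plain relocatable
   reference to the constant itself is wanted.  */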
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  poly_int64 bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;
7910 /* If we are taking the address of a constant and are at the top level,
7911 we have to use output_constant_def since we can't call force_const_mem
7913 /* ??? This should be considered a front-end bug. We should not be
7914 generating ADDR_EXPR of something that isn't an LVALUE. The only
7915 exception here is STRING_CST. */
7916 if (CONSTANT_CLASS_P (exp
))
7918 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7919 if (modifier
< EXPAND_SUM
)
7920 result
= force_operand (result
, target
);
  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case TARGET_MEM_REF:
      return addr_for_mem_ref (exp, as, true);

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (maybe_ne (bitpos, 0))
    {
      /* Someone beforehand should have rejected taking the address
	 of an object that isn't byte-aligned.  */
      poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bytepos);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
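
/* Illustrative sketch, not part of the original source: the bitpos
   handling above turns a byte-aligned bit position into a byte offset.
   Restated on its own (assumption: BITPOS is a multiple of
   BITS_PER_UNIT, which exact_div checks).  */

static ATTRIBUTE_UNUSED poly_int64
bytepos_from_bitpos_sketch (poly_int64 bitpos)
{
  /* exact_div asserts divisibility, matching the "someone beforehand
     should have rejected" comment above.  */
  return exact_div (bitpos, BITS_PER_UNIT);
}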
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  scalar_int_mode new_tmode = (tmode == pointer_mode
			       ? pointer_mode
			       : address_mode);

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    new_tmode, modifier, as);

  /* Despite expand_expr's claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
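
/* Illustrative sketch, not part of the original source: the all_zeros_p
   fast path above collapses an all-zero aggregate initializer into a
   single block clear.  Stated standalone (assumption: TARGET is a
   BLKmode MEM covering SIZE bytes).  */

static ATTRIBUTE_UNUSED rtx
clear_aggregate_sketch (rtx target, HOST_WIDE_INT size)
{
  /* One clear_storage call instead of one store per field.  */
  clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
  return target;
}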
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
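
/* Illustrative sketch, not part of the original source: the TARGET
   contract documented above in action.  Since expand_expr may return a
   different rtx than the suggested TARGET, a caller that needs the
   value in TARGET must move it there itself.  */

static ATTRIBUTE_UNUSED rtx
expand_into_target_sketch (tree exp, rtx target)
{
  rtx val = expand_expr (exp, target, GET_MODE (target), EXPAND_NORMAL);
  if (val != target)
    emit_move_insn (target, val);
  return target;
}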
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;

  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expanding_cond_expr_using_cmove = true;
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}
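
/* Illustrative sketch, not part of the original source: the
   commit-or-discard idiom used by expand_cond_expr_using_cmove above.
   Speculative insns go into a detached sequence; only on success is
   the sequence spliced into the main insn stream.  */

static ATTRIBUTE_UNUSED rtx_insn *
emit_speculative_sketch (bool success)
{
  start_sequence ();
  /* ... emit candidate insns here ... */
  rtx_insn *seq = get_insns ();
  end_sequence ();
  if (!success)
    return NULL;	/* Dropped; nothing reaches the insn stream.  */
  emit_insn (seq);
  return seq;
}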
rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  scalar_int_mode int_mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false, TYPE_REVERSE_STORAGE_ORDER (type));

	  else
	    {
	      gcc_assert (REG_P (target)
			  && !TYPE_REVERSE_STORAGE_ORDER (type));

	      /* Store this field into a union of the proper type.  */
	      poly_uint64 op0_size
		= tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
	      poly_uint64 union_size = GET_MODE_BITSIZE (mode);
	      store_field (target,
			   /* The conversion must be constructed so that
			      we know at compile time how many bits
			      to preserve.  */
			   ordered_min (op0_size, union_size),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
			   false, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = lowpart_subreg (mode, op0, inner_mode);
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);

    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	else
	  {
	    /* For disjoint address spaces, converting anything but a null
	       pointer invokes undefined behavior.  We truncate or extend the
	       value as if we'd converted via integers, which handles 0 as
	       required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
	    const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
				 op0, POINTERS_EXTEND_UNSIGNED);
	  }

	gcc_assert (op0);
	return op0;
      }

    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be
	 different, expand is able to handle this correctly and get the
	 correct result out of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
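      /* Illustrative example, not from the original source: with 32-bit
	 sizetype and 64-bit pointers, an offset of (sizetype) -4 must
	 become the sign-extended 64-bit -4 rather than the zero-extended
	 0xfffffffc; the round-trip through ssizetype above ensures it.  */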
      /* FALLTHRU */
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && VAR_P (treeop1)
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && HWI_COMPUTABLE_MODE_P (mode)
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && HWI_COMPUTABLE_MODE_P (mode)
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, modifier);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
    case POINTER_DIFF_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);
	  return simplify_gen_binary (MINUS, mode, op0, op1);
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (mode, innermode, op0, true);
		  op1 = convert_modes (mode, innermode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto binop3;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		    widen_mult_const:
		      op0 = convert_modes (mode, innermode, op0, zextend_p);
		      op1
			= convert_modes (mode, innermode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  op1 = expand_normal (treeop1);
		  /* op0 and op1 might be constants, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (mode, word_mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (word_mode, temp);
		  htem = expand_mult_highpart_adjust (word_mode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_divmod" doesn't support sat/no-sat fixed-point
	   divisions.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	/* Possible optimization: compute the dividend with EXPAND_SUM
	   then if the divisor is constant can optimize the case
	   where some terms of the dividend have coeffs divisible by it.  */
	expand_operands (treeop0, treeop1,
			 subtarget, &op0, &op1, EXPAND_NORMAL);
	bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
		     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
	if (SCALAR_INT_MODE_P (mode)
	    && optimize >= 2
	    && get_range_pos_neg (treeop0) == 1
	    && get_range_pos_neg (treeop1) == 1)
	  {
	    /* If both arguments are known to be positive when interpreted
	       as signed, we can expand it as both signed and unsigned
	       division or modulo.  Choose the cheaper sequence in that case.  */
	    bool speed_p = optimize_insn_for_speed_p ();
	    do_pending_stack_adjust ();
	    start_sequence ();
	    rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
	    rtx_insn *uns_insns = get_insns ();
	    end_sequence ();
	    start_sequence ();
	    rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
	    rtx_insn *sgn_insns = get_insns ();
	    end_sequence ();
	    unsigned uns_cost = seq_cost (uns_insns, speed_p);
	    unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

	    /* If costs are the same then use as tie breaker the other
	       factor.  */
	    if (uns_cost == sgn_cost)
	      {
		uns_cost = seq_cost (uns_insns, !speed_p);
		sgn_cost = seq_cost (sgn_insns, !speed_p);
	      }

	    if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
	      {
		emit_insn (uns_insns);
		return uns_ret;
	      }
	    emit_insn (sgn_insns);
	    return sgn_ret;
	  }
	return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }
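
      /* Worked example, not from the original source: for op0 = 7 and
	 op1 = 2, both provably non-negative, signed and unsigned
	 TRUNC_DIV_EXPR agree (7 / 2 == 7u / 2u == 3), so whichever of
	 the two candidate sequences above is cheaper may be emitted.  */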
    case RDIV_EXPR:
      goto binop;

    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
    case ABSU_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
	 and similarly for MAX <x, y>.  */
      if (VECTOR_TYPE_P (type))
	{
	  tree t0 = make_tree (type, op0);
	  tree t1 = make_tree (type, op1);
	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
				    type, t0, t1);
	  return expand_vec_cond_expr (type, comparison, t0, t1,
				       original_target);
	}

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	std::swap (op0, op1);

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }

	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx_insn *seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }

	if (target != op0)
	  emit_move_insn (target, op0);

	lab = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL, lab,
				 profile_probability::uninitialized ());
      }
      emit_move_insn (target, op1);
      emit_label (lab);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  int_mode = SCALAR_INT_TYPE_MODE (type);
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (int_mode));

	  temp = expand_binop (int_mode, xor_optab, op0,
			       immed_wide_int_const (mask, int_mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || type_has_mode_precision_p (type));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_shift" doesn't support sat/no-sat fixed-point
	   shifts.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (! safe_from_p (subtarget, treeop1, 1))
	  subtarget = 0;
	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	op0 = expand_expr (treeop0, subtarget,
			   VOIDmode, EXPAND_NORMAL);

	/* Left shift optimization when shifting across word_size boundary.

	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	   there isn't a native instruction to support this wide mode
	   left shift.  Given the scenario below:

	     Type A = (Type) B << C

	     |<     word_size    >|<     word_size    >|
	     |     dest_high      |      dest_low      |

	   If the shift amount C causes part of B to be shifted across
	   the word size boundary, i.e. part of B is shifted into the
	   high half of the destination register while the rest remains
	   in the low half, then GCC will use the following left shift
	   expand logic:

	   1. Initialize dest_low to B.
	   2. Initialize every bit of dest_high to the sign bit of B.
	   3. Logically left shift dest_low by C bits to finalize dest_low.
	      The value of dest_low before this shift is kept in a temp D.
	   4. Logically left shift dest_high by C.
	   5. Logically right shift D by (word_size - C).
	   6. Or the result of 4 and 5 to finalize dest_high.

	   While, by checking gimple statements, if operand B is
	   coming from signed extension, then we can simplify the above
	   expand logic into:

	      1. dest_high = src_low >> (word_size - C).
	      2. dest_low = src_low << C.

	   We can use one arithmetic right shift to finish all the
	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
	   needed from 6 into 2.

	   The case is similar for zero extension, except that we
	   initialize dest_high to zero rather than copies of the sign
	   bit from B.  Furthermore, we need to use a logical right shift
	   in this case.

	   The choice of sign-extension versus zero-extension is
	   determined entirely by whether or not B is signed and is
	   independent of the current setting of unsignedp.  */
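	/* Worked example, not from the original source: with 32-bit
	   word_mode, B a sign-extended 32-bit value and C = 24:

	     dest_high = src_low >> (32 - 24);	 (arithmetic shift)
	     dest_low  = src_low << 24;		 (logical shift)

	   reproduces all 64 bits of ((int64_t) B) << 24 in two shifts.  */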
	temp = NULL_RTX;
	if (code == LSHIFT_EXPR
	    && target
	    && REG_P (target)
	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
	    && mode == int_mode
	    && TREE_CONSTANT (treeop1)
	    && TREE_CODE (treeop0) == SSA_NAME)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
	    if (is_gimple_assign (def)
		&& gimple_assign_rhs_code (def) == NOP_EXPR)
	      {
		scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
		  (TREE_TYPE (gimple_assign_rhs1 (def)));

		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
			>= GET_MODE_BITSIZE (word_mode)))
		  {
		    rtx_insn *seq, *seq_old;
		    poly_uint64 high_off = subreg_highpart_offset (word_mode,
								   int_mode);
		    bool extend_unsigned
		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
		    rtx dest_high = simplify_gen_subreg (word_mode, target,
							 int_mode, high_off);
		    HOST_WIDE_INT ramount = (BITS_PER_WORD
					     - TREE_INT_CST_LOW (treeop1));
		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

		    start_sequence ();
		    /* dest_high = src_low >> (word_size - C).  */
		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
						  rshift, dest_high,
						  extend_unsigned);
		    if (temp != dest_high)
		      emit_move_insn (dest_high, temp);

		    /* dest_low = src_low << C.  */
		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
						  treeop1, dest_low, unsignedp);
		    if (temp != dest_low)
		      emit_move_insn (dest_low, temp);

		    seq = get_insns ();
		    end_sequence ();
		    temp = target;

		    if (have_insn_for (ASHIFT, int_mode))
		      {
			bool speed_p = optimize_insn_for_speed_p ();
			start_sequence ();
			rtx ret_old = expand_variable_shift (code, int_mode,
							     op0, treeop1,
							     target,
							     unsignedp);

			seq_old = get_insns ();
			end_sequence ();
			if (seq_cost (seq, speed_p)
			    >= seq_cost (seq_old, speed_p))
			  {
			    seq = seq_old;
			    temp = ret_old;
			  }
		      }
		    emit_insn (seq);
		    return REDUCE_BIT_FIELD (temp);
		  }
	      }
	  }

	if (temp == NULL_RTX)
	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
					unsignedp);
	if (code == LSHIFT_EXPR)
	  temp = REDUCE_BIT_FIELD (temp);
	return temp;
      }
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
	temp = do_store_flag (ops,
			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			      tmode != VOIDmode ? tmode : mode);
	if (temp)
	  return temp;

	/* Use a compare and a jump for BLKmode comparisons, or for function
	   type comparisons if we have canonicalize_funcptr_for_compare.  */

	if ((target == 0
	     || modifier == EXPAND_STACK_PARM
	     || ! safe_from_p (target, treeop0, 1)
	     || ! safe_from_p (target, treeop1, 1)
	     /* Make sure we don't have a hard reg (such as function's return
		value) live across basic blocks, if not optimizing.  */
	     || (!optimize && REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	emit_move_insn (target, const0_rtx);

	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot_1 (code, treeop0, treeop1, lab1,
		     profile_probability::uninitialized ());

	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	  emit_move_insn (target, constm1_rtx);
	else
	  emit_move_insn (target, const1_rtx);

	emit_label (lab1);
	return target;
      }
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PACK_TRUNC_EXPR:
      if (VECTOR_BOOLEAN_TYPE_P (type)
	  && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
	  && mode == TYPE_MODE (TREE_TYPE (treeop0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  class expand_operand eops[4];
	  machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  this_optab = vec_pack_sbool_trunc_optab;
	  enum insn_code icode = optab_handler (this_optab, imode);
	  create_output_operand (&eops[0], target, mode);
	  create_convert_operand_from (&eops[1], op0, imode, false);
	  create_convert_operand_from (&eops[2], op1, imode, false);
	  temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
	  create_input_operand (&eops[3], temp, imode);
	  expand_insn (icode, 4, eops);
	  return eops[0].value;
	}
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PACK_FLOAT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
					optab_default);
      target = expand_binop (mode, this_optab, op0, op1, target,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)),
			     OPTAB_DIRECT);
      gcc_assert (target);
      return target;
    case VEC_PERM_EXPR:
      {
	expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
	vec_perm_builder sel;
	if (TREE_CODE (treeop2) == VECTOR_CST
	    && tree_to_vec_perm_builder (&sel, treeop2))
	  {
	    machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
	    temp = expand_vec_perm_const (mode, op0, op1, sel,
					  sel_mode, target);
	  }
	else
	  {
	    op2 = expand_normal (treeop2);
	    temp = expand_vec_perm_var (mode, op0, op1, op2, target);
	  }
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case SAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case COND_EXPR:
      {
	/* A COND_EXPR with its type being VOID_TYPE represents a
	   conditional jump and is handled in
	   expand_gimple_cond_expr.  */
	gcc_assert (!VOID_TYPE_P (type));

	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	gcc_assert (!TREE_ADDRESSABLE (type)
		    && !ignore
		    && TREE_TYPE (treeop1) != void_type_node
		    && TREE_TYPE (treeop2) != void_type_node);

	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
	if (temp)
	  return temp;

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (modifier != EXPAND_STACK_PARM
	    && original_target
	    && safe_from_p (original_target, treeop0, 1)
	    && GET_MODE (original_target) == mode
	    && !MEM_P (original_target))
	  temp = original_target;
	else
	  temp = assign_temp (type, 0, 1);

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	rtx_code_label *lab0 = gen_label_rtx ();
	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot (treeop0, lab0,
		   profile_probability::uninitialized ());
	store_expr (treeop1, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_jump_insn (targetm.gen_jump (lab1));
	emit_barrier ();
	emit_label (lab0);
	store_expr (treeop2, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_label (lab1);
	OK_DEFER_POP;
	return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;
    case VEC_DUPLICATE_EXPR:
      op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      target = expand_vector_broadcast (mode, op0);
      gcc_assert (target);
      return target;

    case VEC_SERIES_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
      return expand_vec_series_expr (mode, op0, op1, target);

    case BIT_INSERT_EXPR:
      {
	unsigned bitpos = tree_to_uhwi (treeop2);
	unsigned bitsize;
	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
	else
	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
	rtx op0 = expand_normal (treeop0);
	rtx op1 = expand_normal (treeop1);
	rtx dst = gen_reg_rtx (mode);
	emit_move_insn (dst, op0);
	store_bit_field (dst, bitsize, bitpos, 0, 0,
			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
	return dst;
      }

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
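
/* Illustrative sketch, not part of the original source: what
   REDUCE_BIT_FIELD amounts to for an unsigned bit-field type, restated
   in plain C (assumption: 0 < PREC < HOST_BITS_PER_WIDE_INT).  */

static ATTRIBUTE_UNUSED unsigned HOST_WIDE_INT
reduce_unsigned_precision_sketch (unsigned HOST_WIDE_INT val, int prec)
{
  /* Mask away the bits beyond PREC, as reduce_to_bit_field_precision
     does for unsigned types.  */
  return val & ((HOST_WIDE_INT_1U << prec) - 1);
}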
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default:
	case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
	case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
	case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
	case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;
  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || !SSA_NAME_VAR (exp)
	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  location_t saved_loc = curr_insn_location ();
	  location_t loc = gimple_location (g);
	  if (loc != UNKNOWN_LOCATION)
	    set_curr_insn_location (loc);
	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* FALLTHRU */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);

	      /* Try to expand conditional compare.  */
	      if (targetm.gen_ccmp_first)
		{
		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
		  r = expand_ccmp_expr (g, mode);
		  if (r)
		    break;
		}
	      /* FALLTHRU */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = loc;
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, alt_rtl,
				      inner_reference_p);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  set_curr_insn_location (saved_loc);
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* fall through */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);

      /* DECL_MODE might change when TYPE_MODE depends on attribute target
	 settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
	  && code == VAR_DECL && MEM_P (decl_rtl)
	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
	decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (exp)
	TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      if (exp)
	context = decl_function_context (exp);
      gcc_assert (!exp
		  || SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
					       : GET_MODE (decl_rtl),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
	  return temp;
	}

      if (exp)
	dmode = DECL_MODE (exp);
      else
	dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && dmode != BLKmode
	  && GET_MODE (decl_rtl) != dmode)
	{
	  machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code != SSA_NAME)
	    pmode = promote_decl_mode (exp, &unsignedp);
	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
		   && gimple_code (g) == GIMPLE_CALL
		   && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
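      /* Illustrative note (not in the original source): on targets that
	 promote narrow values to full-width registers -- e.g. a 64-bit
	 RISC port that keeps an SImode variable sign-extended in a DImode
	 register -- DECL_RTL is the DImode register and the branch above
	 hands back (subreg:SI (reg:DI N) 0) with SUBREG_PROMOTED_VAR_P
	 set, so later consumers can omit redundant re-extensions.  */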
    case INTEGER_CST:
      {
	/* Given that TYPE_PRECISION (type) is not always equal to
	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	   the former to the latter according to the signedness of the
	   type.  */
	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	temp = immed_wide_int_const
	  (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
	return temp;
      }

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (VECTOR_MODE_P (mode))
	  return const_vector_from_tree (exp);
	scalar_int_mode int_mode;
	if (is_int_mode (mode, &int_mode))
	  {
	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
	      return const_scalar_mask_from_tree (int_mode, exp);
	    else
	      {
		tree type_for_mode
		  = lang_hooks.types.type_for_mode (int_mode, 1);
		if (type_for_mode)
		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
					type_for_mode, exp);
	      }
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    /* Constructors need to be fixed-length.  FIXME.  */
	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
	    vec_alloc (v, nunits);
	    for (unsigned int i = 0; i < nunits; ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }

    case CONST_DECL:
      if (modifier == EXPAND_WRITE)
	{
	  /* Writing into CONST_DECL is always invalid, but handle it
	     gracefully.  */
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
					 EXPAND_NORMAL, as);
	  op0 = memory_address_addr_space (mode, op0, as);
	  temp = gen_rtx_MEM (mode, op0);
	  set_mem_addr_space (temp, as);
	  return temp;
	}
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return const_double_from_real_value (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* fall through */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    class expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
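      /* Illustrative note (not in the original source): movmisalign is the
	 target's unaligned-move pattern.  On x86 with SSE, for instance,
	 an under-aligned V4SF load goes through this hook and is emitted
	 as an unaligned vector move (movups) rather than the aligned form
	 that could fault.  */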
    case MEM_REF:
      {
	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple *def_stmt;
	enum insn_code icode;
	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    poly_int64 offset = mem_ref_offset (exp).force_shwi ();
	    base = TREE_OPERAND (base, 0);
	    poly_uint64 type_size;
	    if (known_eq (offset, 0)
		&& !reverse
		&& poly_int_tree_p (TYPE_SIZE (type), &type_size)
		&& known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && !inner_reference_p
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		class expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (targetm.slow_unaligned_access (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode, false, alt_rtl);
	  }
	if (reverse
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  temp = flip_storage_order (mode, temp);
	return temp;
      }
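      /* Illustrative note (not in the original source): folding a defining
	 BIT_AND_EXPR back into the base, as above, lets
	 get_object_alignment see the mask.  E.g. for a dereference of
	 p & ~(uintptr_t) 15 the access is known to be 16-byte aligned,
	 which can avoid the unaligned-load paths that follow.  */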
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (init == NULL_TREE)
	      {
		tree value = build_zero_cst (type);
		if (TREE_CODE (value) == CONSTRUCTOR)
		  {
		    /* If VALUE is a CONSTRUCTOR, this optimization is only
		       useful if this doesn't store the CONSTRUCTOR into
		       memory.  If it does, it is more efficient to just
		       load the data from the array directly.  */
		    rtx ret = expand_constructor (value, target,
						  modifier, true);
		    if (ret == NULL_RTX)
		      break;
		  }

		return expand_expr (value, target, tmode, modifier);
	      }
	    else if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (tree_fits_uhwi_p (index1)
		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    scalar_int_mode mode;

		    if (is_int_mode (TYPE_MODE (type), &mode)
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  scalar_int_mode field_mode;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || (is_int_mode (DECL_MODE (field), &field_mode)
			&& (GET_MODE_PRECISION (field_mode)
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    scalar_int_mode imode
		      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
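      /* Illustrative example (not in the original source): for a signed
	 3-bit field whose constructor value sits in a 32-bit SImode
	 register, count is 32 - 3 = 29; shifting left by 29 and then
	 arithmetically right by 29 turns the raw value 0b101 into -3,
	 i.e. the pair of shifts performs the sign extension that the
	 unsigned path gets from the mask (1 << bitsize) - 1.  */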
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	machine_mode mode1, mode2;
	poly_int64 bitsize, bitpos, bytepos;
	tree offset;
	int reversep, volatilep = 0, must_force_mem;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0, memloc;
	bool clear_mem_expr = false;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to be able to serve as one.  This occurs in unchecked conversion
	   in Ada.  */
	orig_op0 = op0
	  = expand_expr_real (tem,
			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				   != INTEGER_CST)
			       && modifier != EXPAND_STACK_PARM
			       ? target : NULL_RTX),
			      VOIDmode,
			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
			      NULL, true);

	/* If the field has a mode, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	if (MEM_P (op0))
	  {
	    if (mode1 != VOIDmode)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);
	  }

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* Make sure bitpos is not negative, it can wreak havoc later.  */
	if (maybe_lt (bitpos, 0))
	  {
	    gcc_checking_assert (offset == NULL_TREE);
	    offset = size_int (bits_to_bytes_round_down (bitpos));
	    bitpos = num_trailing_bits (bitpos);
	  }

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || (mode == BLKmode
			      && !int_mode_for_size (bitsize, 1).exists ())
			  || maybe_gt (bitpos + bitsize,
				       GET_MODE_BITSIZE (mode2)));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (known_eq (bitpos, 0)
		&& known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
		&& COMPLEX_MODE_P (mode1)
		&& COMPLEX_MODE_P (GET_MODE (op0))
		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
	      {
		if (reversep)
		  op0 = flip_storage_order (GET_MODE (op0), op0);
		if (mode1 != GET_MODE (op0))
		  {
		    rtx parts[2];
		    for (int i = 0; i < 2; i++)
		      {
			rtx op = read_complex_part (op0, i != 0);
			if (GET_CODE (op) == SUBREG)
			  op = force_reg (GET_MODE (op), op);
			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
						       op);
			if (temp)
			  op = temp;
			else
			  {
			    if (!REG_P (op) && !MEM_P (op))
			      op = force_reg (GET_MODE (op), op);
			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
			  }
			parts[i] = op;
		      }
		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
		  }
		return op0;
	      }
	    if (known_eq (bitpos, 0)
		&& known_eq (bitsize,
			     GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
		&& maybe_ne (bitsize, 0))
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (known_eq (bitpos,
			       GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
		     && known_eq (bitsize,
				  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
		     && maybe_ne (bitpos, 0)
		     && maybe_ne (bitsize, 0))
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }

	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    clear_mem_expr = true;
	  }

	if (offset)
	  {
	    machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      {
		/* We cannot be sure that the RTL in offset_rtx is valid outside
		   of a memory address context, so force it into a register
		   before attempting to convert it to the desired mode.  */
		offset_rtx = force_operand (offset_rtx, NULL_RTX);
		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	      }

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& maybe_ne (bitpos, 0)
		&& maybe_gt (bitsize, 0)
		&& multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
		&& multiple_p (bitpos, bitsize)
		&& multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bytepos);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0)
	    && known_eq (bitpos, 0)
	    && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);
	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((MEM_P (op0)
		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : targetm.slow_unaligned_access (mode1,
							  MEM_ALIGN (op0))))
		    || !multiple_p (bitpos, BITS_PER_UNIT)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (known_size_p (bitsize)
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		&& maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
			     bitsize)))
	  {
	    machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && multiple_p (bitpos, BITS_PER_UNIT)))
	      ext_mode = int_mode_for_size (bitsize, 1).else_blk ();

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (known_eq (bitsize, 0))
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target)));

		bytepos = exact_div (bitpos, BITS_PER_UNIT);
		poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
		emit_block_move (target,
				 adjust_address (op0, VOIDmode, bytepos),
				 gen_int_mode (bytesize, Pmode),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (known_eq (bitsize, 0))
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    /* If the result has aggregate type and the extraction is done in
	       an integral mode, then the field may be not aligned on a byte
	       boundary; in this case, if it has reverse storage order, it
	       needs to be extracted as a scalar field with reverse storage
	       order and put back into memory order afterwards.  */
	    if (AGGREGATE_TYPE_P (type)
		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);

	    gcc_checking_assert (known_ge (bitpos, 0));
	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode, reversep, alt_rtl);

	    /* If the result has aggregate type and the mode of OP0 is an
	       integral mode then, if BITSIZE is narrower than this mode
	       and this is for big-endian data, we must put the field
	       into the high-order bits.  And we must also put it back
	       into memory order if it has been previously reversed.  */
	    scalar_int_mode op0_mode;
	    if (AGGREGATE_TYPE_P (type)
		&& is_int_mode (GET_MODE (op0), &op0_mode))
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);

		gcc_checking_assert (known_le (bitsize, size));
		if (maybe_lt (bitsize, size)
		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
		  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
				      size - bitsize, op0, 1);

		if (reversep)
		  op0 = flip_storage_order (op0_mode, op0);
	      }

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = mode;

	/* Get a reference to just this component.  */
	bytepos = bits_to_bytes_round_down (bitpos);
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bytepos);
	else
	  op0 = adjust_address (op0, mode1, bytepos);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* Don't set memory attributes if the base expression is
	   SSA_NAME that got expanded as a MEM.  In that case, we should
	   just honor its original memory attributes.  */
	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	/* If op0 is a temporary because the original expression was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;

	if (reversep
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  op0 = flip_storage_order (mode1, op0);

	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    /* Don't diagnose the error attribute in thunks, those are
	       artificially created.  */
	    && !CALL_FROM_THUNK_P (exp)
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  {
	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
	    error ("%Kcall to %qs declared with attribute error: %s", exp,
		   identifier_to_locale (ident),
		   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	  }
	if (fndecl
	    /* Don't diagnose the warning attribute in thunks, those are
	       artificially created.  */
	    && !CALL_FROM_THUNK_P (exp)
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  {
	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
	    warning_at (tree_nonartificial_location (exp),
			OPT_Wattribute_warning,
			"%Kcall to %qs declared with attribute warning: %s",
			exp, identifier_to_locale (ident),
			TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	  }

	/* Check for a built-in function.  */
	if (fndecl && fndecl_built_in_p (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && poly_int_tree_p (TYPE_SIZE (type))
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  machine_mode mode1;
	  poly_int64 bitsize, bitpos, bytepos;
	  tree offset;
	  int unsignedp, reversep, volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	      && !reversep
	      && known_size_p (bitsize)
	      && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      rtx orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bytepos);
		  else
		    op0 = adjust_address (op0, mode, bytepos);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode
	       && GET_MODE (op0) != BLKmode
	       && known_eq (GET_MODE_PRECISION (mode),
			    GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode, false, NULL);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (modifier != EXPAND_WRITE
	      && modifier != EXPAND_MEMORY
	      && !inner_reference_p
	      && mode != BLKmode
	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  poly_uint64 mode_size = GET_MODE_SIZE (mode);
		  poly_uint64 temp_size = mode_size;
		  if (GET_MODE (op0) != BLKmode)
		    temp_size = upper_bound (temp_size,
					     GET_MODE_SIZE (GET_MODE (op0)));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    {
		      rtx size_rtx = gen_int_mode (mode_size, Pmode);
		      emit_block_move (new_with_op0_mode, op0, size_rtx,
				       (modifier == EXPAND_STACK_PARM
					? BLOCK_OP_CALL_PARM
					: BLOCK_OP_NORMAL));
		    }
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
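      /* Illustrative example (not in the original source): for
	 VIEW_CONVERT_EXPR<int>(f) with a 32-bit float f already in a
	 register, the equal-precision path above just reinterprets the
	 bits via gen_lowpart -- no conversion code is emitted, unlike
	 (int) f, which would go through a float-to-int conversion.  */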
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx_code_label *label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    profile_probability prob = profile_probability::uninitialized ();
	    if (value)
	      jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
	    else
	      jumpif (TREE_OPERAND (rhs, 1), label, prob);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
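      /* Illustrative example (not in the original source): with single-bit
	 fields, "s.a |= s.b;" is expanded as "if (s.b) s.a = 1;" and
	 "s.a &= s.b;" as "if (!s.b) s.a = 0;", replacing a
	 read-modify-write of s.a with one conditional store.  */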
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  poly_int64 const_exp;
  if (poly_int_rtx_p (exp, &const_exp))
    {
      tree t = build_int_cst_type (type, const_exp);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
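/* Illustrative example (not in the original source): reducing to a 5-bit
   field held in SImode, the unsigned case masks with 0x1f, while the
   signed case shifts left by 32 - 5 = 27 and arithmetically back right
   by 27, so e.g. the raw value 0x1f comes out as -1.  */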
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
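/* Illustrative example (not in the original source): this recognizes
   hand-rolled alignment idioms of the form
     base + ((-(uintptr_t) &base) & 63)
   where adding the masked, negated address rounds &base up to the next
   64-byte boundary, so the resulting address may be recorded as
   BIGGEST_ALIGNMENT-aligned.  */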
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the (possibly
   non-constant) offset in bytes within the string that ARG is accessing.
   If MEM_SIZE is non-zero the storage size of the memory is returned.
   If DECL is non-zero the constant declaration is returned if available.  */
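/* Illustrative example (not in the original source): for
   ARG = &"hello"[2] this returns the STRING_CST "hello" with
   *PTR_OFFSET set to 2; for a POINTER_PLUS_EXPR whose base folds to a
   string literal, the offset comes back as the (possibly non-constant)
   sum of the folded parts.  */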
tree
string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
{
  tree dummy = NULL_TREE;
  if (!mem_size)
    mem_size = &dummy;

  /* Store the type of the original expression before conversions
     via NOP_EXPR or POINTER_PLUS_EXPR to other types have been
     made.  */
  tree argtype = TREE_TYPE (arg);
  STRIP_NOPS (arg);

  tree array;
  /* Non-constant index into the character array in an ARRAY_REF
     expression or null.  */
  tree varidx = NULL_TREE;

  poly_int64 base_off = 0;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      arg = TREE_OPERAND (arg, 0);
      tree ref = arg;
      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (arg, 1);
	  if (TREE_CODE (idx) != INTEGER_CST)
	    {
	      /* From a pointer (but not array) argument extract the variable
		 index to prevent get_addr_base_and_unit_offset() from failing
		 due to it.  Use it later to compute the non-constant offset
		 into the string and return it to the caller.  */
	      varidx = idx;
	      ref = TREE_OPERAND (arg, 0);

	      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
		return NULL_TREE;

	      if (!integer_zerop (array_ref_low_bound (arg)))
		return NULL_TREE;

	      if (!integer_onep (array_ref_element_size (arg)))
		return NULL_TREE;
	    }
	}
      array = get_addr_base_and_unit_offset (ref, &base_off);
      if (!array
	  || (TREE_CODE (array) != VAR_DECL
	      && TREE_CODE (array) != CONST_DECL
	      && TREE_CODE (array) != STRING_CST))
	return NULL_TREE;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      tree offset;
      tree str = string_constant (arg0, &offset, mem_size, decl);
      if (!str)
	{
	  str = string_constant (arg1, &offset, mem_size, decl);
	  arg1 = arg0;
	}

      if (str)
	{
	  /* Avoid pointers to arrays (see bug 86622).  */
	  if (POINTER_TYPE_P (TREE_TYPE (arg))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
	      && !(decl && !*decl)
	      && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
		   && tree_fits_uhwi_p (*mem_size)
		   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
	    return NULL_TREE;

	  tree type = TREE_TYPE (offset);
	  arg1 = fold_convert (type, arg1);
	  *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
	  return str;
	}
      return NULL_TREE;
    }
  else if (TREE_CODE (arg) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (arg);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == ADDR_EXPR)
	return string_constant (rhs1, ptr_offset, mem_size, decl);
      else if (code != POINTER_PLUS_EXPR)
	return NULL_TREE;

      tree offset;
      if (tree str = string_constant (rhs1, &offset, mem_size, decl))
	{
	  /* Avoid pointers to arrays (see bug 86622).  */
	  if (POINTER_TYPE_P (TREE_TYPE (rhs1))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
	      && !(decl && !*decl)
	      && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
		   && tree_fits_uhwi_p (*mem_size)
		   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
	    return NULL_TREE;

	  tree rhs2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (offset);
	  rhs2 = fold_convert (type, rhs2);
	  *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
	  return str;
	}
      return NULL_TREE;
    }
  else if (DECL_P (arg))
    array = arg;
  else
    return NULL_TREE;

  tree offset = wide_int_to_tree (sizetype, base_off);
  if (varidx)
    {
      if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE)
	return NULL_TREE;

      gcc_assert (TREE_CODE (arg) == ARRAY_REF);
      tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0)));
      if (TREE_CODE (chartype) != INTEGER_TYPE)
	return NULL_TREE;

      offset = fold_convert (sizetype, varidx);
    }

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
      if (decl)
	*decl = NULL_TREE;
      gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
			   >= TREE_STRING_LENGTH (array));
      return array;
    }

  if (!VAR_P (array) && TREE_CODE (array) != CONST_DECL)
    return NULL_TREE;

  tree init = ctor_for_folding (array);

  /* Handle variables initialized with string literals.  */
  if (!init || init == error_mark_node)
    return NULL_TREE;
  if (TREE_CODE (init) == CONSTRUCTOR)
    {
      /* Convert the 64-bit constant offset to a wider type to avoid
	 overflow.  */
      offset_int wioff;
      if (!base_off.is_constant (&wioff))
	return NULL_TREE;

      wioff *= BITS_PER_UNIT;
      if (!wi::fits_uhwi_p (wioff))
	return NULL_TREE;

      base_off = wioff.to_uhwi ();
      unsigned HOST_WIDE_INT fieldoff = 0;
      init = fold_ctor_reference (TREE_TYPE (arg), init, base_off, 0, array,
				  &fieldoff);
      HOST_WIDE_INT cstoff;
      if (!base_off.is_constant (&cstoff))
	return NULL_TREE;

      cstoff = (cstoff - fieldoff) / BITS_PER_UNIT;
      tree off = build_int_cst (sizetype, cstoff);
      if (varidx)
	offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off);
      else
	offset = off;
    }

  *ptr_offset = offset;

  tree inittype = TREE_TYPE (init);

  if (TREE_CODE (init) == INTEGER_CST
      && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
	  || TYPE_MAIN_VARIANT (inittype) == char_type_node))
    {
      /* For a reference to (address of) a single constant character,
	 store the native representation of the character in CHARBUF.
	 If the reference is to an element of an array or a member
	 of a struct, only consider narrow characters until ctors
	 for wide character arrays are transformed to STRING_CSTs
	 like those for narrow arrays.  */
      unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
      if (len > 0)
	{
	  /* Construct a string literal with elements of INITTYPE and
	     the representation above.  Then strip
	     the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST.  */
	  init = build_string_literal (len, (char *)charbuf, inittype);
	  init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
	}
    }

  tree initsize = TYPE_SIZE_UNIT (inittype);

  if (TREE_CODE (init) == CONSTRUCTOR && initializer_zerop (init))
    {
      /* Fold an empty/zero constructor for an implicitly initialized
	 object or subobject into the empty string.  */

      /* Determine the character type from that of the original
	 expression.  */
      tree chartype = argtype;
      if (POINTER_TYPE_P (chartype))
	chartype = TREE_TYPE (chartype);
      while (TREE_CODE (chartype) == ARRAY_TYPE)
	chartype = TREE_TYPE (chartype);
      /* Convert a char array to an empty STRING_CST having an array
	 of the expected type.  */
      if (!initsize)
	initsize = integer_zero_node;

      unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
      init = build_string_literal (size ? 1 : 0, "", chartype, size);
      init = TREE_OPERAND (init, 0);
      init = TREE_OPERAND (init, 0);

      *ptr_offset = integer_zero_node;
    }

  if (decl)
    *decl = array;

  if (TREE_CODE (init) != STRING_CST)
    return NULL_TREE;

  *mem_size = initsize;

  gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));

  return init;
}
/* Compute the modular multiplicative inverse of A modulo M
   using extended Euclid's algorithm.  Assumes A and M are coprime.  */
static wide_int
mod_inv (const wide_int &a, const wide_int &b)
{
  /* Verify the assumption.  */
  gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));

  unsigned int p = a.get_precision () + 1;
  gcc_checking_assert (b.get_precision () + 1 == p);
  wide_int c = wide_int::from (a, p, UNSIGNED);
  wide_int d = wide_int::from (b, p, UNSIGNED);
  wide_int x0 = wide_int::from (0, p, UNSIGNED);
  wide_int x1 = wide_int::from (1, p, UNSIGNED);

  if (wi::eq_p (b, 1))
    return wide_int::from (1, p, UNSIGNED);

  while (wi::gt_p (c, 1, UNSIGNED))
    {
      wide_int t = d;
      wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
      c = t;
      wide_int s = x0;
      x0 = wi::sub (x1, wi::mul (q, x0));
      x1 = s;
    }
  if (wi::lt_p (x1, 0, SIGNED))
    x1 += d;
  return x1;
}
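/* Worked example (not in the original source): the inverse of 3 modulo
   2^32 is 0xaaaaaaab, since 3 * 0xaaaaaaab == 0x200000001 == 1 (mod
   2^32); this is the constant C3 used below to rewrite x % 3
   comparisons.  */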
/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
   is non-zero and C3 ((1<<(prec-1)) | (C1 - 1)):
   for C2 > 0 to x & C3 == C2
   for C2 < 0 to x & C3 == (C2 & C3).  */
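/* Worked example (not in the original source): for 32-bit int,
   x % 16 == 5 uses C3 = 0x8000000f and becomes (x & 0x8000000f) == 5,
   while x % 16 == -5 becomes (x & 0x8000000f) == 0x8000000b, i.e.
   (-5) & C3.  Keeping the sign bit in the mask is what makes the
   rewrite valid for negative x.  */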
static enum tree_code
maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
  gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
  tree treeop0 = gimple_assign_rhs1 (stmt);
  tree treeop1 = gimple_assign_rhs2 (stmt);
  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1
      || TYPE_UNSIGNED (type)
      /* Signed x % c == 0 should have been optimized into unsigned modulo
	 earlier.  */
      || integer_zerop (*arg1)
      /* If c is known to be non-negative, modulo will be expanded as unsigned
	 modulo.  */
      || get_range_pos_neg (treeop0) == 1)
    return code;

  /* x % c == d where d < 0 && d <= -c should be always false.  */
  if (tree_int_cst_sgn (*arg1) == -1
      && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
    return code;

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1) - 1;
  w |= wi::shifted_mask (0, prec - 1, true, prec);
  tree c3 = wide_int_to_tree (type, w);
  tree c4 = *arg1;
  if (tree_int_cst_sgn (*arg1) == -1)
    c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
				EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  ops.code = BIT_AND_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = c3;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
				EXPAND_NORMAL);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (TREE_TYPE (*arg0), mur);
  *arg1 = c4;
  return code;
}
/* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
   If C1 is odd, transform to:
   (X - C2) * C3 <= C4 (or >), where
   C3 is modular multiplicative inverse of C1 and 1<<prec and
   C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
   if C2 > ((1<<prec) - 1) % C1).
   If C1 is even, S = ctz (C1) and C2 is 0, use
   ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
   inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.

   For signed (X % C1) == 0 if C1 is odd, transform to (all operations in
   it unsigned):
   (X * C3) + C4 <= 2 * C4, where
   C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
   C4 is ((1<<(prec - 1) - 1) / C1).
   If C1 is even, S = ctz(C1), use
   ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
   where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
   and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).

   See the Hacker's Delight book, section 10-17.  */
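/* Worked example (not in the original source): unsigned x % 6 == 0 on a
   32-bit type has S = 1 and C1>>S = 3, so C3 = 0xaaaaaaab (the inverse
   of 3 mod 2^32) and C4 = (0xffffffff / 3) >> 1 = 0x2aaaaaaa; the test
   becomes ((x * 0xaaaaaaab) r>> 1) <= 0x2aaaaaaa, trading the division
   for a multiply and a rotate.  */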
enum tree_code
maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
  gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
  gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);

  if (optimize < 2)
    return code;

  gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
  if (stmt == NULL)
    return code;

  tree treeop0 = gimple_assign_rhs1 (stmt);
  tree treeop1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (treeop0) != SSA_NAME
      || TREE_CODE (treeop1) != INTEGER_CST
      /* Don't optimize the undefined behavior case x % 0;
	 x % 1 should have been optimized into zero, punt if
	 it makes it here for whatever reason;
	 x % -c should have been optimized into x % c.  */
      || compare_tree_int (treeop1, 2) <= 0
      /* Likewise x % c == d where d >= c should be always false.  */
      || tree_int_cst_le (treeop1, *arg1))
    return code;

  /* Unsigned x % pow2 is handled right already, for signed
     modulo handle it in maybe_optimize_pow2p_mod_cmp.  */
  if (integer_pow2p (treeop1))
    return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);

  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1)
    return code;

  signop sgn = UNSIGNED;
  /* If both operands are known to have the sign bit clear, handle
     even the signed modulo case as unsigned.  treeop1 is always
     positive >= 2, checked above.  */
  if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
    sgn = SIGNED;

  if (!TYPE_UNSIGNED (type))
    {
      if (tree_int_cst_sgn (*arg1) == -1)
	return code;
      type = unsigned_type_for (type);
      if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
	return code;
    }

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1);
  int shift = wi::ctz (w);
  /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
     C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
     If C1 is odd, we can handle all cases by subtracting
     C4 below.  We could handle even the even C1 and C2 > -1U % C1 cases
     e.g. by testing for overflow on the subtraction, punt on that for now
     though.  */
  if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
    {
      if (sgn == SIGNED)
	return code;
      wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
      if (wi::gtu_p (wi::to_wide (*arg1), x))
	return code;
    }

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Punt if treeop0 is used in the same bb in a division
	 or another modulo with the same divisor.  We should expect
	 the division and modulo combined together.  */
      if (use_stmt == stmt
	  || gimple_bb (use_stmt) != gimple_bb (stmt))
	continue;
      if (!is_gimple_assign (use_stmt)
	  || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
	      && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
	continue;
      if (gimple_assign_rhs1 (use_stmt) != treeop0
	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
	continue;
      return code;
    }

  w = wi::lrshift (w, shift);
  wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
  wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
  wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
  tree c3 = wide_int_to_tree (type, m);
  tree c5 = NULL_TREE;
  wide_int d, e;
  if (sgn == UNSIGNED)
    {
      d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
      /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
	 otherwise use < or subtract one from C4.  E.g. for
	 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
	 x % 3U == 1 already needs to be
	 (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
      if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
	d -= 1;
      if (shift)
	d = wi::lrshift (d, shift);
    }
  else
    {
      e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
      if (!shift)
	d = wi::lshift (e, 1);
      else
	{
	  e = wi::bit_and (e, wi::mask (shift, true, prec));
	  d = wi::lrshift (e, shift - 1);
	}
      c5 = wide_int_to_tree (type, e);
    }
  tree c4 = wide_int_to_tree (type, d);

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
				EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  tree t = fold_convert_loc (loc, type, treeop0);
  if (!integer_zerop (*arg1))
    t = fold_build2_loc (loc, MINUS_EXPR, type, t,
			 fold_convert (type, *arg1));
  t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
  if (sgn == SIGNED)
    t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
  if (shift)
    {
      tree s = build_int_cst (NULL_TREE, shift);
      t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
    }

  start_sequence ();
  rtx mur = expand_normal (t);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, LE, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (type, mur);
  *arg1 = c4;

  return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
}
/* Generate code to calculate OPS, and exploded expression
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((POINTER_TYPE_P (TREE_TYPE (arg0))
	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
	  || (POINTER_TYPE_P (TREE_TYPE (arg1))
	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
      && TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
      if (code != ops->code)
	{
	  struct separate_ops nops = *ops;
	  nops.code = ops->code = code;
	  nops.op0 = arg0;
	  nops.op1 = arg1;
	  nops.type = TREE_TYPE (arg0);
	  return do_store_flag (&nops, target, mode);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
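
/* Illustrative sketch, not part of GCC (#if 0 guarded): the shift-and-mask
   shape of the single-bit test described above, checked by brute force.
   The helper names are hypothetical.  */
#if 0
#include <assert.h>

/* (x & (1 << n)) != 0 becomes (x >> n) & 1; the EQ form xors with 1.  */
static unsigned
bit_test_ne (unsigned x, unsigned n)
{
  return (x >> n) & 1u;
}

static unsigned
bit_test_eq (unsigned x, unsigned n)
{
  return ((x >> n) & 1u) ^ 1u;
}

int
main (void)
{
  for (unsigned x = 0; x < 256; x++)
    for (unsigned n = 0; n < 8; n++)
      {
	assert (bit_test_ne (x, n) == ((x & (1u << n)) != 0));
	assert (bit_test_eq (x, n) == ((x & (1u << n)) == 0));
      }
  return 0;
}
#endif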
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  class expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    {
      unsigned int width;

      /* We know the value of INDEX is between 0 and RANGE.  If we have a
	 sign-extended subreg, and RANGE does not have the sign bit set, then
	 we have a value that is valid for both sign and zero extension.  In
	 this case, we get better code if we sign extend.  */
      if (GET_CODE (index) == SUBREG
	  && SUBREG_PROMOTED_VAR_P (index)
	  && SUBREG_PROMOTED_SIGNED_P (index)
	  && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
	index = convert_to_mode (Pmode, index, 0);
      else
	index = convert_to_mode (Pmode, index, 1);
    }

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
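
/* Illustrative sketch, not part of GCC (#if 0 guarded): the range check
   plus table dispatch this function emits, expressed in plain C over a
   hypothetical 4-entry table.  One unsigned comparison performs both
   bounds checks because the lowest case value was already subtracted.  */
#if 0
#include <stdio.h>

static const char *
dispatch (int x)
{
  static const char *const table[4] = { "ten", "eleven", "twelve",
					"thirteen" };
  unsigned idx = (unsigned) (x - 10);	/* lowest case value subtracted */
  if (idx > 3u)				/* catches x < 10 and x > 13 alike */
    return "default";
  return table[idx];			/* the tablejump proper */
}

int
main (void)
{
  printf ("%s %s\n", dispatch (12), dispatch (42));
  return 0;
}
#endif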
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	builder.quick_push (CONSTM1_RTX (inner));
      else
	gcc_unreachable ();
    }
  return builder.build ();
}
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;

  /* The result has a fixed number of bits so the input must too.  */
  unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
  for (unsigned int i = 0; i < nunits; ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
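
/* Illustrative sketch, not part of GCC (#if 0 guarded): the same packing
   expressed on plain C types, assuming a hypothetical 8-element boolean
   vector whose lanes are all-zeros or all-ones.  */
#if 0
#include <stdint.h>
#include <assert.h>

/* Pack the lanes into a scalar bitmask, bit I set iff lane I is nonzero.  */
static uint8_t
scalar_mask (const int8_t elts[8])
{
  uint8_t res = 0;
  for (unsigned i = 0; i < 8; i++)
    if (elts[i] != 0)
      res |= (uint8_t) (1u << i);
  return res;
}

int
main (void)
{
  const int8_t v[8] = { -1, 0, -1, -1, 0, 0, 0, -1 };
  assert (scalar_mask (v) == 0x8d);	/* bits 0, 2, 3, 7 */
  return 0;
}
#endif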
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
							  inner));
      else if (TREE_CODE (elt) == FIXED_CST)
	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							  inner));
      else
	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
						  inner));
    }
  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
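
/* Illustrative sketch, not part of GCC (#if 0 guarded): the naming scheme
   the ACONCAT above produces, mimicked with snprintf.  For example, a
   language prefix of "gxx" combined with DWARF unwinding yields the
   well-known C++ personality symbol __gxx_personality_v0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  char name[64];
  snprintf (name, sizeof name, "__%s_personality%s", "gxx", "_v0");
  puts (name);	/* prints __gxx_personality_v0 */
  return 0;
}
#endif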
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}