/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "gimple-fold.h"
#include "rtx-vector-builder.h"

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
                        machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
                                                              const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
                          profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a <rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0
                   || direct_store[(int) mode] == 0);
             regno++)
          {
            if (!targetm.hard_regno_mode_ok (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
        {
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
          >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    {
      from = gen_lowpart (to_int_mode, SUBREG_REG (from));
      from_mode = to_int_mode;
    }

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
                            GET_MODE_BITSIZE (to_mode)));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}

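/* An illustrative sketch (hypothetical pseudos SRC and DST, not from this
   file): a typical caller widens a QImode value into an SImode pseudo.

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);    // unsignedp == 1: zero-extend

   With unsignedp == 0 the same call would sign-extend instead.  */
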
/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);

  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real)
    {
      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       from, from_mode);
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                    /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */

  if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);

          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);

          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when maybe so.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          scalar_mode intermediate;

          /* Search for a mode to convert via.  */
          opt_scalar_mode intermediate_iter;
          FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
            {
              scalar_mode intermediate = intermediate_iter.require ();
              if (((can_extend_p (to_mode, intermediate, unsignedp)
                    != CODE_FOR_nothing)
                   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
                                                         intermediate)))
                  && (can_extend_p (intermediate, from_mode, unsignedp)
                      != CODE_FOR_nothing))
                {
                  convert_move (to, convert_to_mode (intermediate, from,
                                                     unsignedp), unsignedp);
                  return;
                }
            }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

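/* Illustrative note (a sketch, not from this file): the shift fallback
   above implements sign extension with the usual identity.  For a
   hypothetical 8-bit value held in the low byte of a 32-bit word:

     int32_t sext8 (int32_t x)
     {
       return (x << 24) >> 24;  // LSHIFT_EXPR then arithmetic RSHIFT_EXPR
     }

   A logical (unsigned) right shift would give zero extension instead.  */
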
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

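/* A sketch of the distinction (hypothetical operands NARROW and REG_SI):
   convert_to_mode returns an rtx, possibly allocating a new pseudo,
   while convert_move emits into an existing destination.

     rtx wide = convert_to_mode (SImode, narrow, 1);  // may make a pseudo
     convert_move (reg_si, narrow, 1);                // fills REG_SI  */
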
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
          >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
                                   GET_MODE_PRECISION (int_mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
          || CONST_POLY_INT_P (x)
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
    return gen_lowpart (int_mode, x);

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
                            GET_MODE_BITSIZE (oldmode)));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

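/* An illustrative sketch (hypothetical values): converting the QImode
   constant 0xff to SImode canonicalizes it according to UNSIGNEDP.

     rtx c = gen_int_mode (0xff, QImode);      // (const_int -1) in QImode
     convert_modes (SImode, QImode, c, 1);     // -> (const_int 255)
     convert_modes (SImode, QImode, c, 0);     // -> (const_int -1)  */
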
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        {
          tmode = mode_iter.require ();
          if (GET_MODE_SIZE (tmode) > max_pieces
              || targetm.slow_unaligned_access (tmode, align))
            break;
          xmode = tmode;
        }

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

748 static scalar_int_mode
749 widest_int_mode_for_size (unsigned int size
)
751 scalar_int_mode result
= NARROWEST_INT_MODE
;
753 gcc_checking_assert (size
> 1);
755 opt_scalar_int_mode tmode
;
756 FOR_EACH_MODE_IN_CLASS (tmode
, MODE_INT
)
757 if (GET_MODE_SIZE (tmode
.require ()) < size
)
758 result
= tmode
.require ();
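/* Illustrative (a hypothetical target with QI/HI/SI/DI integer modes):
   the loop above picks the widest mode strictly narrower than SIZE.

     widest_int_mode_for_size (5);   // -> SImode (4 bytes < 5)
     widest_int_mode_for_size (3);   // -> HImode (2 bytes < 3)  */
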
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
                  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
                                                 optimize_insn_for_speed_p ());
}

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}

/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        {
          unsigned HOST_WIDE_INT n_pieces = l / modesize;

            case COMPARE_BY_PIECES:
              int batch = targetm.compare_by_pieces_branch_ratio (mode);
              int batch_ops = 4 * batch - 1;
              unsigned HOST_WIDE_INT full = n_pieces / batch;
              n_insns += full * batch_ops;
              if (n_pieces % batch != 0)
                n_insns += batch_ops;

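/* Worked arithmetic (hypothetical 16-byte compare in SImode pieces with a
   branch ratio of 4): n_pieces = 16/4 = 4, batch = 4, batch_ops = 4*4 - 1
   = 15, full = 1, so the COMPARE_BY_PIECES estimate charges 15 insns for
   the batch, plus the final branch emitted later by finish_mode.  */
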
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;

  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;

  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;

  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;

  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;

  /* True if this is an address to be used for load operations rather
     than for stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;

 public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};

/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
                          void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
        m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
         implementing the memory operation has never handled them.
         Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
        {
          m_auto = true;
          if (STACK_GROWS_DOWNWARD)
            m_addr_inc = -1;
          else
            m_addr_inc = 1;
          gcc_assert (constfn != NULL);
        }
      else
        gcc_assert (is_load);
    }
}

/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
                             HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
                     ? USE_LOAD_PRE_DECREMENT (mode)
                     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
                      ? USE_LOAD_POST_INCREMENT (mode)
                      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
                                 plus_constant (addr_mode,
                                                m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}

/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}

/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}

/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   already).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}

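/* Illustrative note (hypothetical target with post-increment): with
   m_explicit_inc == 1, each access is followed by increment_address, so a
   4-byte piece at address register R emits roughly

     (set (mem:SI (reg R)) ...)
     (set (reg R) (plus (reg R) (const_int 4)))

   which later passes may fuse into (mem:SI (post_inc (reg R))).  */
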
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overridden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
                  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};

/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
                                rtx from, bool from_load,
                                by_pieces_constfn from_cfn,
                                void *from_cfn_data,
                                unsigned HOST_WIDE_INT len,
                                unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
               from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}

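/* Worked example (hypothetical operands): copying between two plain MEMs
   leaves both address-increment hints at 0, so TOI >= 0 && FROMI >= 0
   holds and the copy runs forward (m_reverse == false, m_offset == 0).
   Had both MEMs used POST_DEC addresses, the hints would be -1 and the
   operation would instead start at M_OFFSET == LEN and walk backward.  */
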
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
        {
          unsigned int size = GET_MODE_SIZE (mode);
          rtx to1 = NULL_RTX, from1;

          while (m_len >= size)
            {
              if (m_reverse)
                m_offset -= size;

              to1 = m_to.adjust (mode, m_offset);
              from1 = m_from.adjust (mode, m_offset);

              m_to.maybe_predec (-(HOST_WIDE_INT)size);
              m_from.maybe_predec (-(HOST_WIDE_INT)size);

              generate (to1, from1, mode);

              m_to.maybe_postinc (size);
              m_from.maybe_postinc (size);

              if (!m_reverse)
                m_offset += size;

              m_len -= size;
            }

          finish_mode (mode);
        }

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}

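/* Worked example (hypothetical 11-byte, well-aligned operation with
   MOVE_MAX_PIECES == 8): the loop first emits one DImode access (m_len
   11 -> 3), then m_max_size drops to 8 so the next widest mode is
   SImode, whose inner loop is skipped (3 < 4); HImode then covers two
   bytes (3 -> 1) and QImode the final byte: 8 + 2 + 1 = 11 bytes.  */
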
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};

/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}

/* A callback used when iterating for a move_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   moved in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
   gen function that should be used to generate the mode.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}

/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
move_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Return value is based on RETMODE argument.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, memop_ret retmode)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}

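/* Illustrative call (hypothetical MEM operands DST_MEM and SRC_MEM): copy
   16 bytes between two BLKmode MEMs known to be 32-bit aligned, returning
   DST_MEM itself.

     rtx ret = move_by_pieces (dst_mem, src_mem, 16,
                               32 /* bits */, RETURN_BEGIN);

   A caller is expected to have checked can_move_by_pieces (16, 32).  */
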
/* Derived class from op_by_pieces_d, providing support for memory store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
                     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};

/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}

/* A callback used when iterating for a store_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   stored in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
   gen function that should be used to generate the mode.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}

/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
                                               memsetp
                                                 ? SET_BY_PIECES
                                                 : STORE_BY_PIECES,
                                               optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          scalar_int_mode mode = widest_int_mode_for_size (max_size);

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                 void *constfundata, unsigned int align, bool memsetp,
                 memop_ret retmode)
{
  if (len == 0)
    {
      gcc_assert (retmode != RETURN_END_MINUS_ONE);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
                (len, align,
                 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
                 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}

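/* Illustrative sketch (hypothetical helper, not part of this file): a
   CONSTFUN that replays the bytes of a constant string, assuming
   c_readstr's (const char *, scalar_int_mode) form.

     static rtx
     string_cst_fn (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);  // constant piece at OFFSET
     }

   Such a callback is exactly what can_store_by_pieces probes above.  */
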
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}

/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);

 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
                       void *op1_cfn_data, HOST_WIDE_INT len, int align,
                       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};

/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
                               true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
        temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
                             true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
        return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
                           m_fail_label, profile_probability::uninitialized ());
}

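/* Worked example (hypothetical batch of 4 pieces): the four piecewise
   differences are OR-ed into M_ACCUMULATOR, and only the fourth call
   branches, testing the accumulator against zero.  Conceptually:

     acc = (a0 - b0) | (a1 - b1) | (a2 - b2) | (a3 - b3);
     if (acc != 0) goto fail;   // one branch per batch of 4 pieces  */
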
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}

/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
                             NULL_RTX, NULL, m_fail_label,
                             profile_probability::uninitialized ());
}

/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
                   rtx target, unsigned int align,
                   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
                            fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);
  emit_barrier ();
  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  int may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = 1;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = 0;
      break;

    case BLOCK_OP_NO_LIBCALL_RET:
      may_use_call = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  poly_int64 const_size;
  if (poly_int_rtx_p (size, &const_size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, const_size);
      set_mem_size (y, const_size);
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      if (may_use_call < 0)
        return pc_rtx;

      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);

      retval = emit_block_copy_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}

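/* Illustrative call (hypothetical MEM operands DST_MEM and SRC_MEM): the
   common entry point for a plain 32-byte structure copy.

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   For a CONST_INT size this passes MIN == MAX == 32 as hints to
   emit_block_move_hints above.  */
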
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label,
                           profile_probability::guessed_always ()
                             .apply_scale (9, 10));
}

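/* Illustrative C equivalent of the loop emitted above (a sketch with
   hypothetical operands): a byte copy with the test at the bottom.

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];

   Because the emitted RTL jumps to CMP_LABEL first, a zero SIZE skips
   the loop body entirely.  */
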
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
                           rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}

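/* Illustrative call (hypothetical operands DST_MEM, SRC_MEM, SIZE_RTX):
   this is how the move path above falls back to the C library.

     rtx ret = emit_block_op_via_libcall (BUILT_IN_MEMCPY,
                                          dst_mem, src_mem, size_rtx,
                                          false /* tailcall */);

   The same helper serves memmove and memcmp via their BUILT_IN_* codes.  */
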
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
                          rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
                          HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
                               TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
                           unsigned align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}

/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   returning NULL_RTX.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
                      bool equality_only, by_pieces_constfn y_cfn,
                      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
                                y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
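/* Illustrative sketch, not part of the original source: the register groups
   handled here have the shape

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with its byte offset into the value.
   gen_group_rtx on such an ORIG yields the same shape with fresh pseudos
   in place of the hard registers 3 and 4.  */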
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
		   poly_int64 ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
	{
	  src = gen_reg_rtx (imode);
	  emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
	}
      else
	{
	  src = assign_stack_temp (GET_MODE (orig_src), ssize);
	  emit_move_insn (src, orig_src);
	}
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_int64 shift = 0;

      /* Handle trailing fragments that run over the size of the struct.
	 It's the target's responsibility to make sure that the fragment
	 cannot be strictly smaller in some cases and strictly larger
	 in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (maybe_gt (bytelen, 0));
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
	  && known_eq (bytelen, GET_MODE_SIZE (mode)))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
	  poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
	  unsigned int elt;
	  poly_int64 subpos;

	  if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
	      && known_le (subpos + bytelen, slen0))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, elt);
	      if (maybe_ne (subpos, 0)
		  || maybe_ne (subpos + bytelen, slen0)
		  || (!CONSTANT_P (tmps[i])
		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     subpos * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, false,
					     NULL);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (known_eq (bytepos, 0));
	      mem = assign_stack_temp (GET_MODE (src), slen);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode, false,
					   NULL);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  if (known_eq (bytelen, ssize))
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      /* TODO: const_wide_int can have sizes other than this...  */
	      gcc_assert (known_eq (2 * bytelen, ssize));
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, false, NULL);

      if (maybe_ne (shift, 0))
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
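/* Illustrative sketch, not part of the original source: the usual pairing
   of the two helpers above when a value must survive until the group is
   finally needed.  SRC is a hypothetical MEM and PAR the destination
   PARALLEL:

     rtx tmp = emit_group_load_into_temps (par, src, type, ssize);
     ... other expansion work ...
     emit_group_move (par, tmp);

   The first call reads SRC into pseudos shaped like PAR; the second moves
   those pseudos into the (probable) hard registers.  */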
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
		  poly_int64 ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
	{
	  dst = gen_reg_rtx (imode);
	  emit_group_store (dst, src, type, ssize);
	  dst = gen_lowpart (GET_MODE (orig_dst), dst);
	}
      else
	{
	  dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
	  emit_group_store (dst, src, type, ssize);
	}
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      poly_int64 bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
			bytepos))
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
							  finish - 1), 1)),
			bytepos))
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_uint64 adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.
	 It's the target's responsibility to make sure that the fragment
	 cannot be strictly smaller in some cases and strictly larger
	 in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	adj_bytelen = ssize - bytepos;
      else
	adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (known_le (bytepos + adj_bytelen,
			GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
	    dest = XEXP (dst, 0);
	  else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      machine_mode dest_mode = GET_MODE (dest);
	      machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      /* Handle trailing fragments that run over the size of the struct.  */
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }

	  /* Make sure not to write past the end of the struct.  */
	  store_bit_field (dest,
			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
			   VOIDmode, tmps[i], false);
	}

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
	       && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	       && multiple_p (bytepos * BITS_PER_UNIT,
			      GET_MODE_ALIGNMENT (mode))
	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
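/* Illustrative sketch, not part of the original source: spilling a value
   returned in a register group to memory.  PAR is the PARALLEL describing
   the group, TYPE the value's type:

     HOST_WIDE_INT size = int_size_in_bytes (type);
     rtx mem = assign_stack_temp (BLKmode, size);
     emit_group_store (mem, par, type, size);

   Each piece lands at the byte offset recorded in its EXPR_LIST entry.  */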
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
  fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
  fixed_size_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
	   && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
	   && GET_MODE (target) == mode
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
	copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
	dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, copy_mode, copy_mode,
					  false, NULL),
		       false);
    }
}
/* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode_in, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
  fixed_size_mode dst_mode;
  scalar_int_mode min_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = arg_int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					   * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
  min_mode = smallest_int_mode_for_size (bitsize);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
	 on a word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	{
	  /* Generate an appropriate register.  */
	  dst_word = gen_reg_rtx (word_mode);
	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;

	  /* Clear the destination before we move anything into it.  */
	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
	}

      /* Find the largest integer mode that can be used to copy all or as
	 many bits as possible of the structure if the target supports larger
	 copies.  There are too many corner cases here w.r.t. alignments on
	 the read/writes.  So if there is any padding just use single byte
	 operations.  */
      opt_scalar_int_mode mode_iter;
      if (padding_correction == 0 && !STRICT_ALIGNMENT)
	{
	  FOR_EACH_MODE_FROM (mode_iter, min_mode)
	    {
	      unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
	      if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
		  && msize <= BITS_PER_WORD)
		bitsize = msize;
	      else
		break;
	    }
	}

      /* We need a new source operand each time bitpos is on a word
	 boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
	 xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
		       0, 0, word_mode,
		       extract_bit_field (src_word, bitsize,
					  bitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  false, NULL),
		       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
	 entire structure.  */
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
	  break;

      /* A suitable mode should have been found.  */
      mode = mode_iter.require ();
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
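/* Illustrative sketch, not part of the original source: recording argument
   registers for a call.  Assuming an argument lives in two consecutive
   hard registers starting at regno 4:

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   and, for a PARALLEL group REGS of the shape used by emit_group_load:

     use_group_regs (&call_fusage, regs);

   CALL_FUSAGE is later attached to the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */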
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size,
		     unsigned HOST_WIDE_INT min_size,
		     unsigned HOST_WIDE_INT max_size,
		     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  poly_int64 size_val;
  if (mode != BLKmode
      && poly_int_rtx_p (size, &size_val)
      && known_eq (size_val, GET_MODE_SIZE (mode)))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
						 CLEAR_BY_PIECES,
						 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
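/* Illustrative sketch, not part of the original source: zeroing a 64-byte
   BLKmode MEM (a hypothetical OBJECT):

     clear_storage (object, GEN_INT (64), BLOCK_OP_NORMAL);

   With a constant size the min/max hints both resolve to 64, so
   clear_storage_hints is free to pick clear_by_pieces, a setmem pattern,
   or a memset libcall.  */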
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size,
			unsigned HOST_WIDE_INT min_size,
			unsigned HOST_WIDE_INT max_size,
			unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
	expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
	expected_size = min_size;
    }

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
		   false);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode, false, NULL);
}
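/* Illustrative sketch, not part of the original source: copying a complex
   value Y into X part by part, as emit_move_complex_parts below does.  X
   and Y are hypothetical rtxes of the same complex mode:

     rtx re = read_complex_part (y, false);
     rtx im = read_complex_part (y, true);
     write_complex_part (x, re, false);
     write_complex_part (x, im, true);

   For CONCATs this degenerates to plain moves of the two halves.  */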
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
		       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  rtx temp;

  poly_int64 adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  scalar_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  poly_int64 submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
      imag_first = true;
      break;
    case PRE_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
	   && HARD_REGISTER_P (x)
	   && REG_NREGS (x) == 1)
      && !(REG_P (y)
	   && HARD_REGISTER_P (y)
	   && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  if (GET_CODE (op) != SUBREG)
    return false;
  machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
  poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
  return (known_ge (offset, GET_MODE_SIZE (innermostmode))
	  || known_le (offset, -UNITS_PER_WORD));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i, mode_size;

  /* This function can only handle cases where the number of words is
     known at compile time.  */
  mode_size = GET_MODE_SIZE (mode).to_constant ();
  gcc_assert (mode_size >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y)
      || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
	{
	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
	    return ret;
	}
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
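/* Illustrative sketch, not part of the original source: the common pattern
   of materializing a constant into a fresh pseudo:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant were not legitimate for the target it would be spilled
   to the constant pool by the code above, and a REG_EQUAL note recording
   the original constant would be attached to the move.  */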
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
	continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  dstmode, speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  dstmode, speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
	 so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
	target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
	return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, poly_int64 extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && known_eq (extra, 0))
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (maybe_ne (extra, 0))
	temp = expand_binop (Pmode, add_optab, temp,
			     gen_int_mode (extra, Pmode),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (maybe_ne (extra, 0) && below)
	temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      poly_int64 csize;
      if (poly_int_rtx_p (size, &csize))
	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
			      -csize - (below ? 0 : extra));
      else if (maybe_ne (extra, 0) && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (Pmode, size,
							       extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (NARROWEST_INT_MODE, temp);
}
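/* Illustrative sketch, not part of the original source: allocating stack
   space for a variable-sized outgoing block.  SIZE_RTX is a hypothetical
   ptr_mode register holding the byte count:

     rtx addr = push_block (size_rtx, 0, 0);

   ADDR then addresses the beginning of the freshly pushed block, typically
   based on virtual_outgoing_args_rtx when the stack grows downward.  */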
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}
3967 /* A utility routine used here, in reload, and in try_split. The insns
3968 after PREV up to and including LAST are known to adjust the stack,
3969 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3970 placing notes as appropriate. PREV may be NULL, indicating the
3971 entire insn sequence prior to LAST should be scanned.
3973 The set of allowed stack pointer modifications is small:
3974 (1) One or more auto-inc style memory references (aka pushes),
3975 (2) One or more addition/subtraction with the SP as destination,
3976 (3) A single move insn with the SP as destination,
3977 (4) A call_pop insn,
3978 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3980 Insns in the sequence that do not modify the SP are ignored,
3981 except for noreturn calls.
3983 The return value is the amount of adjustment that can be trivially
3984 verified, via immediate operand or auto-inc. If the adjustment
3985 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */

poly_int64
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;

      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      poly_int64 offset;
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
	return offset;
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
	{
	  mem = dest;
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
	mem = SET_SRC (set);
      else
	return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
	{
	case PRE_INC:
	case POST_INC:
	  return GET_MODE_SIZE (GET_MODE (mem));
	case PRE_DEC:
	case POST_DEC:
	  return -GET_MODE_SIZE (GET_MODE (mem));
	case PRE_MODIFY:
	case POST_MODIFY:
	  addr = XEXP (addr, 1);
	  gcc_assert (GET_CODE (addr) == PLUS);
	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
	  return rtx_to_poly_int64 (XEXP (addr, 1));
	default:
	  gcc_unreachable ();
	}
    }
}
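
/* Illustrative examples, assuming 4-byte SImode and a downward-growing
   stack:

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI r0))

   matches the PRE_DEC arm above and yields -4, while

       (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))

   is a direct SP modification whose strip_offset result is -16.  An SP
   reference in a non-scalar-integer mode such as V4SImode is considered
   non-trivial and yields HOST_WIDE_INT_MIN.  */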

poly_int64
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
		       poly_int64 end_args_size)
{
  poly_int64 args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
	 a call argument containing a TLS address that itself requires
	 a call to __tls_get_addr.  The handling of stack_pointer_delta
	 in emit_single_push_insn is supposed to ensure that any such
	 notes are already correct.  */
      rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
      gcc_assert (!note || known_eq (args_size, get_args_size (note)));

      poly_int64 this_delta = find_args_size_adjust (insn);
      if (known_eq (this_delta, 0))
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (known_eq (this_delta, HOST_WIDE_INT_MIN))
	saw_unknown = true;

      if (!note)
	add_args_size_note (insn, args_size);
      if (STACK_GROWS_DOWNWARD)
	this_delta = -poly_uint64 (this_delta);

      if (saw_unknown)
	args_size = HOST_WIDE_INT_MIN;
      else
	args_size -= this_delta;
    }

  return args_size;
}
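
/* Illustrative sketch (values hypothetical): if the insns between PREV
   and LAST are two 4-byte pushes and END_ARGS_SIZE is 8, the backward
   walk attaches REG_ARGS_SIZE 8 to the second push, subtracts its
   delta, attaches REG_ARGS_SIZE 4 to the first, and returns 0 as the
   args size in effect before the sequence.  */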

#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (known_eq (GET_MODE_SIZE (mode), rounded_size))
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     such a capability.  */
  else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
    {
      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= rounded_size;

      dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}

/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  poly_int64 delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  /* Adjust stack_pointer_delta to describe the situation after the push
     we just performed.  Note that we must do this after the push rather
     than before the push in case calculating X needs pushes and pops of
     its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
     for such pushes and pops must not include the effect of the future
     push of X.  */
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_args_size_note (last, stack_pointer_delta);
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
	      || known_eq (delta, old_delta));
}
#endif

/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
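
/* Worked example (values hypothetical): with X = (plus sp 8), SIZE = 16
   and Y = (plus sp 16), TMP is (plus sp 24), SUB simplifies to
   (const_int 8) and IN_RANGE (8, 1, 16) holds, so an 8-byte overlap is
   reported.  If X and Y had different base registers, SUB would not be
   a CONST_INT and -2 ("can't determine") would be returned.  */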

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   to not overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, poly_int64 extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  pad_direction stack_direction
    = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;

  /* Decide where to pad the argument: PAD_DOWNWARD for below,
     PAD_UPWARD for above, or PAD_NONE for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != PAD_NONE)
      where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((!targetm.slow_unaligned_access (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
			   align / BITS_PER_UNIT))
	  && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (maybe_ne (extra, 0)
	      && args_addr == 0
	      && where_pad != PAD_NONE
	      && where_pad != stack_direction)
	    anti_adjust_stack (gen_int_mode (extra, Pmode));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
			  RETURN_BEGIN);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  poly_int64 offset;
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
	      extra = 0;
	    }
	  else if (poly_int_rtx_p (args_so_far, &offset))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + offset));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  /* If part should go in registers and pushing to that part would
	     overwrite some of the values that need to go into regs, load the
	     overlapping values into temporary pseudos to be moved into the hard
	     regs at the end after the stack pushing has completed.
	     We cannot load them directly into the hard regs here because
	     they can be clobbered by the block move expansions.  */

	  if (partial > 0 && reg != 0 && mode == BLKmode
	      && GET_CODE (reg) != PARALLEL)
	    {
	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
	      if (overlapping > 0)
		{
		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
		  overlapping /= UNITS_PER_WORD;

		  tmp_regs = XALLOCAVEC (rtx, overlapping);

		  for (int i = 0; i < overlapping; i++)
		    tmp_regs[i] = gen_reg_rtx (word_mode);

		  for (int i = 0; i < overlapping; i++)
		    emit_move_insn (tmp_regs[i],
				    operand_subword_force (target, i, mode));
		}
	      else if (overlapping == -1)
		overlapping = 0;
	      /* Could not determine whether there is overlap.
		 Fail the sibcall.  */
	      else
		{
		  overlapping = 0;
		  if (sibcall_p)
		    return false;
		}
	    }
	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  This case is only supported
	 for fixed-width modes.  */
      int size = GET_MODE_SIZE (mode).to_constant ();
      size /= UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
	  && args_addr == 0
	  && where_pad != PAD_NONE
	  && where_pad != stack_direction)
	anti_adjust_stack (gen_int_mode (extra, Pmode));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  if (!emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad, sibcall_p))
	    return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
	  && args_addr == 0
	  && where_pad != PAD_NONE
	  && where_pad != stack_direction)
	anti_adjust_stack (gen_int_mode (extra, Pmode));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
	  dest = gen_rtx_MEM (mode, memory_address (mode, addr));

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

	  for (int i = 0; i < overlapping; i++)
	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
					 + nregs - overlapping + i),
			    tmp_regs[i]);
	}
    }

  if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_int_mode (extra, Pmode));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}

/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}

/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (poly_uint64 pbitsize,
				 poly_uint64 pbitpos,
				 poly_uint64 pbitregion_start,
				 poly_uint64 pbitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
  /* str_mode is not guaranteed to be a scalar type.  */
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize;
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
  if (mode1 != VOIDmode
      || !pbitsize.is_constant (&bitsize)
      || !pbitpos.is_constant (&bitpos)
      || !pbitregion_start.is_constant (&bitregion_start)
      || !pbitregion_end.is_constant (&bitregion_end)
      || bitsize >= BITS_PER_WORD
      || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_bitsize = BITS_PER_WORD;

      scalar_int_mode best_mode;
      if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
	return false;
      str_mode = best_mode;
      str_bitsize = GET_MODE_BITSIZE (best_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      else
	binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
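
/* Illustrative example (source level, hypothetical layout): given

       struct S { unsigned a : 1; unsigned b : 31; } s;
       s.a ^= 1;

   the BIT_XOR_EXPR arm above emits a single xor of the containing word
   with a shifted one-bit mask instead of an extract/modify/insert
   sequence.  */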

/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
	       poly_int64_pod *bitpos, tree *offset)
{
  poly_int64 bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      poly_int64 rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if (!multiple_p (rbitpos, BITS_PER_UNIT))
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  poly_uint64 field_offset, repr_offset;
  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (maybe_gt (bitoffset, *bitpos))
    {
      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);

      *bitpos += adjust_bits;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust_bytes);
      else
	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
}

/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}

/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || targetm.slow_unaligned_access (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
	reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
			 false);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && (REF_REVERSE_STORAGE_ORDER (to)
	      || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      poly_int64 bitsize, bitpos;
      poly_uint64 bitregion_start = 0;
      poly_uint64 bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (maybe_lt (bitpos, 0))
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bits_to_bytes_round_down (bitpos));
	  bitpos = num_trailing_bits (bitpos);
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (maybe_gt (bitsize, 0)
	       && multiple_p (bitsize, BITS_PER_UNIT)
	       && multiple_p (bitpos, BITS_PER_UNIT))
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}

      if (offset != 0)
	{
	  machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    {
	      /* We cannot be sure that the RTL in offset_rtx is valid outside
		 of a memory address context, so force it into a register
		 before attempting to convert it to the desired mode.  */
	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	    }

	  /* If we have an expression in OFFSET_RTX and a non-zero
	     byte offset in BITPOS, adding the byte offset before the
	     OFFSET_RTX results in better intermediate code, which makes
	     later rtl optimization passes perform better.

	     We prefer intermediate code like this:

	     r124:DI=r123:DI+0x18
	     [r124:DI]=r121:DI

	     ... instead of ...

	     r124:DI=r123:DI+0x10
	     [r124:DI+0x8]=r121:DI

	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
	  poly_int64 bytepos;
	  if (mode1 != VOIDmode
	      && maybe_ne (bitpos, 0)
	      && maybe_gt (bitsize, 0)
	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	      && multiple_p (bitpos, bitsize)
	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
	      bitregion_start = 0;
	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  machine_mode to_mode = GET_MODE (to_rtx);
	  gcc_checking_assert (COMPLEX_MODE_P (to_mode));
	  poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
	  unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
	  if (TYPE_MODE (TREE_TYPE (from)) == to_mode
	      && known_eq (bitpos, 0)
	      && known_eq (bitsize, mode_bitsize))
	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
	  else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
		   && known_eq (bitsize, inner_bitsize)
		   && (known_eq (bitpos, 0)
		       || known_eq (bitpos, inner_bitsize)))
	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
				 false, nontemporal, reversep);
	  else if (known_le (bitpos + bitsize, inner_bitsize))
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (known_ge (bitpos, inner_bitsize))
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - inner_bitsize,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
	    {
	      result = expand_normal (from);
	      if (GET_CODE (result) == CONCAT)
		{
		  to_mode = GET_MODE_INNER (to_mode);
		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
		  rtx from_real
		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
					   from_mode, 0);
		  rtx from_imag
		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
					   from_mode, 0);
		  if (!from_real || !from_imag)
		    goto concat_store_slow;
		  emit_move_insn (XEXP (to_rtx, 0), from_real);
		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
		}
	      else
		{
		  rtx from_rtx
		    = simplify_gen_subreg (to_mode, result,
					   TYPE_MODE (TREE_TYPE (from)), 0);
		  if (from_rtx)
		    {
		      emit_move_insn (XEXP (to_rtx, 0),
				      read_complex_part (from_rtx, false));
		      emit_move_insn (XEXP (to_rtx, 1),
				      read_complex_part (from_rtx, true));
		    }
		  else
		    {
		      machine_mode to_mode
			= GET_MODE_INNER (GET_MODE (to_rtx));
		      rtx from_real
			= simplify_gen_subreg (to_mode, result,
					       TYPE_MODE (TREE_TYPE (from)),
					       0);
		      rtx from_imag
			= simplify_gen_subreg (to_mode, result,
					       TYPE_MODE (TREE_TYPE (from)),
					       GET_MODE_SIZE (to_mode));
		      if (!from_real || !from_imag)
			goto concat_store_slow;
		      emit_move_insn (XEXP (to_rtx, 0), from_real);
		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
		    }
		}
	    }
	  else
	    {
	    concat_store_slow:;
	      rtx temp = assign_stack_temp (to_mode,
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from, get_alias_set (to),
				    nontemporal, reversep);
	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  gcc_checking_assert (known_ge (bitpos, 0));
	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from,
					       reversep))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);

      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
	      (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal, false);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
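
/* Illustrative example for the misaligned-store path above
   (hypothetical type): a store through

       struct __attribute__ ((packed)) P { char c; int i; } *p;
       p->i = x;

   produces a MEM_REF whose alignment is below GET_MODE_ALIGNMENT of
   SImode, so it is expanded via the movmisalign optab when available,
   or via store_bit_field otherwise.  */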

/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.  */

rtx
store_expr (tree exp, rtx target, int call_param_p,
	    bool nontemporal, bool reverse)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target,
			 call_param_p, nontemporal, reverse);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
		 profile_probability::uninitialized ());
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal, reverse);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal, reverse);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
      scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
      scalar_int_mode inner_mode = subreg_promoted_mode (target);

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (outer_mode)
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
					   TYPE_UNSIGNED (TREE_TYPE (exp))))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_SIGN (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_SIGN (target));
	  temp = convert_modes (inner_mode, outer_mode, temp,
				SUBREG_PROMOTED_SIGN (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  (exp_len > str_copy_len ? RETURN_END
				   : RETURN_BEGIN));
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
	 value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl, false);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    {
      if (GET_MODE_CLASS (GET_MODE (target))
	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
	  && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
		       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
	{
	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
				       TYPE_MODE (TREE_TYPE (exp)), 0);
	  if (t)
	    temp = t;
	}
      if (GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp, reverse);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx_code_label *label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      poly_int64 const_copy_size;
	      if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
		{
		  size = plus_constant (address_mode, size, -const_copy_size);
		  target = adjust_address (target, BLKmode, const_copy_size);
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  if (reverse)
	    temp = flip_storage_order (GET_MODE (target), temp);
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
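
/* Illustrative example for the STRING_CST path above: for

       char buf[16] = "abc";

   the string bytes are stored by pieces and the remaining tail of BUF
   is cleared with clear_storage, instead of materializing a 16-byte
   block copy from read-only memory.  */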

/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
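
/* Illustrative example: in

       struct msg { int len; char data[]; };

   DATA satisfies this predicate: it is the last field, its array type
   has a zero lower bound and no upper bound, and the enclosing struct
   has a known constant size.  */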

/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT nelts;
	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
	  return nelts;
	else
	  return -1;
      }

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
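
/* Illustrative example: for

       struct t { int a; short b[3]; _Complex double z; };

   the !FOR_CTOR_P estimate is 1 + 3 + 2 = 6 scalars, whereas the
   FOR_CTOR_P answer is 3, the number of top-level elements a complete
   constructor must provide.  */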

/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_unique_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  unique_nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
							   &ic, p_complete);

	    nz_elts += mult * nz;
	    unique_nz_elts += unz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  unique_nz_elts += TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    /* We can only construct constant-length vectors using
	       CONSTRUCTOR.  */
	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
	    for (unsigned int i = 0; i < nunits; ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  {
		    nz_elts += mult;
		    unique_nz_elts++;
		  }
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    unique_nz_elts += tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_unique_nz_elts += unique_nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
     high - low + 1 (this can be useful for callers to determine ctors
     that could be cheaply initialized with - perhaps nested - loops
     compared to copied from huge read-only data),
     and place it in *P_UNIQUE_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_unique_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_unique_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
				     p_init_elts, p_complete);
}
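
/* An illustrative example (not from the original source): for the
   GNU C initializer { [0 ... 7] = 1, [8] = 0 } of "int a[9]", the
   RANGE_EXPR has multiplier 8, so *P_NZ_ELTS is 8, *P_UNIQUE_NZ_ELTS
   is 1 and *P_INIT_ELTS is 9; *P_COMPLETE is true because every
   element is explicitly initialized.  */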
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
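
/* For instance (illustrative only): initializing
   "union { char c; int i; }" with { .c = 1 } reaches this function with
   NUM_ELTS == 1 and LAST_TYPE == char; the size comparison against the
   union fails, so the constructor is not considered complete -- the
   bytes beyond C are not given a value.  */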
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
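
/* For example (illustrative only), "int a[100] = { [3] = 1, [50] = 2 };"
   yields nz_elts == 2 out of init_elts == 100, so mostly_zeros_p is
   true and store_constructor will prefer to clear the whole object and
   then store just the two nonzero elements.  */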
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
			 poly_uint64 bitregion_start,
			 poly_uint64 bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
      && maybe_ne (bitsize, 0U)
      && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (known_eq (bitpos, 0) || MEM_P (target)))
    {
      if (MEM_P (target))
	{
	  machine_mode target_mode = GET_MODE (target);
	  if (target_mode != BLKmode
	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
	    target_mode = BLKmode;
	  target = adjust_address (target, target_mode, bytepos);
	}

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bytesize, reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
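
/* E.g. (illustrative only): for "struct { struct { int x, y; } in; } v
   = { { 0, 1 } }", the inner CONSTRUCTOR sits on a byte boundary, so we
   take the shortcut into store_constructor with CLEARED still set and
   avoid both re-clearing the inner struct and going through bit-field
   stores for it.  */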
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (known_eq (size, 0) || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (known_size_p (size)
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
	  {
	    clear_storage (target, gen_int_mode (size, Pmode),
			   BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      gcc_unreachable ();

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = NULL_TREE;
	      }
	    else
	      gcc_unreachable ();

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && maybe_gt (size, 0))
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    poly_int64 bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode != BLKmode)
	      bitsize = GET_MODE_BITSIZE (mode);
	    else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
	      bitsize = -1;

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 exact_div (bitsize, BITS_PER_UNIT),
					 reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	insn_code icode = CODE_FOR_nothing;
	tree elt;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	poly_uint64 n_elts;
	unsigned HOST_WIDE_INT const_n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;
	machine_mode mode = GET_MODE (target);

	gcc_assert (eltmode != BLKmode);

	/* Try using vec_duplicate_optab for uniform vectors.  */
	if (!TREE_SIDE_EFFECTS (exp)
	    && VECTOR_MODE_P (mode)
	    && eltmode == GET_MODE_INNER (mode)
	    && ((icode = optab_handler (vec_duplicate_optab, mode))
		!= CODE_FOR_nothing)
	    && (elt = uniform_vector_p (exp)))
	  {
	    struct expand_operand ops[2];
	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
	    expand_insn (icode, 2, ops);
	    if (!rtx_equal_p (target, ops[0].value))
	      emit_move_insn (target, ops[0].value);
	    break;
	  }

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target)
	    && VECTOR_MODE_P (mode)
	    && n_elts.is_constant (&const_n_elts))
	  {
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
				      * TYPE_VECTOR_SUBPARTS (etype),
				      n_elts));
		emode = TYPE_MODE (etype);
	      }
	    icode = convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = const_n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (maybe_lt (count, n_elts)
			     || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && maybe_gt (size, 0) && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (mode));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (mode));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (mode, vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
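
/* The RANGE_EXPR paths above can be exercised with a GNU C designated
   range initializer (illustrative only):

       int a[16] = { [0 ... 3] = 7 };

   A small constant range like this is unrolled into individual element
   stores; a large or non-constant range is instead compiled into the
   explicit loop built around LOOP_START/LOOP_END above.  */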
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  Don't do that for zero sized addressable lhs of
     calls.  */
  if (known_eq (bitsize, 0)
      && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	  || TREE_CODE (exp) != CALL_EXPR))
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (known_eq (bitpos, 0));
      return store_expr (exp, target, 0, nontemporal, reverse);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  poly_int64 decl_bitsize;
  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
	      || !multiple_p (bitpos, BITS_PER_UNIT)))
      || (known_size_p (bitsize)
	  && mode != BLKmode
	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (known_size_p (bitsize)
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
		       bitsize)
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && multiple_p (bitsize, BITS_PER_UNIT))
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || !multiple_p (bitsize, BITS_PER_UNIT)
	      || !multiple_p (bitpos, BITS_PER_UNIT)
	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
				   &decl_bitsize)
	      || maybe_ne (decl_bitsize, bitsize)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (known_size_p (bitsize)
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && maybe_ne (TYPE_PRECISION (type),
			   GET_MODE_BITSIZE (TYPE_MODE (type)))
	      && known_eq (bitsize, TYPE_PRECISION (type)))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type)
		  && known_ge (TYPE_PRECISION (type), bitsize))
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* We don't support variable-sized BLKmode bitfields, since our
	 handling of BLKmode is bound up with the ability to break
	 things into words.  */
      gcc_assert (mode != BLKmode || bitsize.is_constant ());

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  machine_mode temp_mode = GET_MODE (temp);
	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && is_int_mode (GET_MODE (temp), &temp_mode))
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (temp_mode, temp);

	  gcc_checking_assert (known_le (bitsize, size));
	  if (maybe_lt (bitsize, size)
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      /* Use of to_constant for BLKmode was checked above.  */
	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && multiple_p (bitpos, BITS_PER_UNIT)
		  && multiple_p (bitsize, BITS_PER_UNIT))))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp));
	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);

	  target = adjust_address (target, VOIDmode, bytepos);
	  emit_block_move (target, temp,
			   gen_int_mode (bytesize, Pmode),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
	{
	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      gcc_checking_assert (known_ge (bitpos, 0));
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode,
				   exact_div (bitpos, BITS_PER_UNIT));

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
	{
	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
	  return to_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
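
/* A typical caller-visible effect (illustrative only): for

       struct { unsigned a : 3, b : 5; } s;  s.b = 9;

   MODE is VOIDmode, so the branch above expands the RHS and funnels it
   through store_bit_field -- on a typical little-endian target with
   BITSIZE == 5 and BITPOS == 3 -- rather than building an addressable
   reference to the component.  */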
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, poly_int64_pod *pbitsize,
		     poly_int64_pod *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  poly_offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	{
	  mode = DECL_MODE (field);
	  /* For vector fields re-check the target flags, as DECL_MODE
	     could have been set with different target flags than
	     the current function has.  */
	  if (mode == BLKmode
	      && VECTOR_TYPE_P (TREE_TYPE (field))
	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
	    mode = TYPE_MODE (TREE_TYPE (field));
	}
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  poly_offset_int boff = mem_ref_offset (exp);
		  boff <<= LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (poly_int_tree_p (offset))
    {
      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
				      TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (tem.to_shwi (pbitpos))
	*poffset = offset = NULL_TREE;
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
	{
	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
	  offset = size_binop (PLUS_EXPR, offset,
			       build_int_cst (sizetype, bytes.force_shwi ()));
	}

      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && multiple_p (*pbitpos, BITS_PER_UNIT)
      && multiple_p (*pbitsize, BITS_PER_UNIT))
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
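
/* As a concrete illustration (not from the original source): for
   "s.f[i].b" this returns the decl "s", with *POFFSET set to the
   variable byte offset "i * sizeof (s.f[0])", *PBITPOS holding the
   constant bit offset of F plus that of B within the element, and
   *PBITSIZE the width of B.  */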
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
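
/* For instance (illustrative only): handed (plus (reg 60) (const_int 4)),
   force_operand emits an add insn and returns the pseudo holding the
   sum, so callers such as the address expanders never see a bare
   arithmetic rtx where an operand is required.  */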
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
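
/* Example (illustrative only): if EXP is known to be 24 * N, tree_ctz
   reports 3 trailing zero bits, so the result is 8 -- the largest
   power of two dividing every multiple of 24.  */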
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
7852 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7853 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7856 expand_expr_addr_expr_1 (tree exp
, rtx target
, scalar_int_mode tmode
,
7857 enum expand_modifier modifier
, addr_space_t as
)
7859 rtx result
, subtarget
;
7861 poly_int64 bitsize
, bitpos
;
7862 int unsignedp
, reversep
, volatilep
= 0;
7865 /* If we are taking the address of a constant and are at the top level,
7866 we have to use output_constant_def since we can't call force_const_mem
7868 /* ??? This should be considered a front-end bug. We should not be
7869 generating ADDR_EXPR of something that isn't an LVALUE. The only
7870 exception here is STRING_CST. */
7871 if (CONSTANT_CLASS_P (exp
))
7873 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7874 if (modifier
< EXPAND_SUM
)
7875 result
= force_operand (result
, target
);
7879 /* Everything must be something allowed by is_gimple_addressable. */
7880 switch (TREE_CODE (exp
))
7883 /* This case will happen via recursion for &a->b. */
7884 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7888 tree tem
= TREE_OPERAND (exp
, 0);
7889 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7890 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7891 return expand_expr (tem
, target
, tmode
, modifier
);
7894 case TARGET_MEM_REF
:
7895 return addr_for_mem_ref (exp
, as
, true);
7898 /* Expand the initializer like constants above. */
7899 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7901 if (modifier
< EXPAND_SUM
)
7902 result
= force_operand (result
, target
);
7906 /* The real part of the complex number is always first, therefore
7907 the address is the same as the address of the parent object. */
7910 inner
= TREE_OPERAND (exp
, 0);
7914 /* The imaginary part of the complex number is always second.
7915 The expression is therefore always offset by the size of the
7918 bitpos
= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp
)));
7919 inner
= TREE_OPERAND (exp
, 0);
7922 case COMPOUND_LITERAL_EXPR
:
7923 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7924 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7925 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7926 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7927 the initializers aren't gimplified. */
7928 if (COMPOUND_LITERAL_EXPR_DECL (exp
)
7929 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp
)))
7930 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp
),
7931 target
, tmode
, modifier
, as
);
7934 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7935 expand_expr, as that can have various side effects; LABEL_DECLs for
7936 example, may not have their DECL_RTL set yet. Expand the rtl of
7937 CONSTRUCTORs too, which should yield a memory reference for the
7938 constructor's contents. Assume language specific tree nodes can
7939 be expanded in some interesting way. */
7940 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7942 || TREE_CODE (exp
) == CONSTRUCTOR
7943 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7945 result
= expand_expr (exp
, target
, tmode
,
7946 modifier
== EXPAND_INITIALIZER
7947 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7949 /* If the DECL isn't in memory, then the DECL wasn't properly
7950 marked TREE_ADDRESSABLE, which will be either a front-end
7951 or a tree optimizer bug. */
7953 gcc_assert (MEM_P (result
));
7954 result
= XEXP (result
, 0);
7956 /* ??? Is this needed anymore? */
7958 TREE_USED (exp
) = 1;
7960 if (modifier
!= EXPAND_INITIALIZER
7961 && modifier
!= EXPAND_CONST_ADDRESS
7962 && modifier
!= EXPAND_SUM
)
7963 result
= force_operand (result
, target
);
  /* Pass FALSE as the last argument to get_inner_reference although
     we are expanding to RTL.  The rationale is that we know how to
     handle "aligning nodes" here: we can just bypass them because
     they won't change the final object whose address will be returned
     (they actually exist only for that purpose).  */
  inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                               &unsignedp, &reversep, &volatilep);

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
        result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
                         modifier == EXPAND_INITIALIZER
                         ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
         than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
        tmp = convert_modes (tmode, GET_MODE (tmp),
                             tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
        result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
        {
          subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
                                        1, OPTAB_LIB_WIDEN);
        }
    }

  if (maybe_ne (bitpos, 0))
    {
      /* Someone beforehand should have rejected taking the address
         of an object that isn't byte-aligned.  */
      poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bytepos);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
    }

  return result;
}
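/* For instance, for the address of a COMPONENT_REF such as &s.f, the
   code above first expands the address of the enclosing object and
   then adds the field's byte offset.  A hypothetical sketch of the
   same arithmetic in plain C (names illustrative only):

     char *addr_of_field (struct S *s)
     {
       char *base = (char *) s;               // address of inner object
       return base + offsetof (struct S, f);  // bitpos / BITS_PER_UNIT
     }

   Variable offsets (e.g. array indexing) instead go through the PLUS
   emitted from the `offset' branch above rather than plus_constant.  */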
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
                       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  scalar_int_mode new_tmode = (tmode == pointer_mode
                               ? pointer_mode : address_mode);

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    new_tmode, modifier, as);

  /* Despite expand_expr's claims about ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
                    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
            && ! (target != 0 && safe_from_p (target, exp, 1)))
           || TREE_ADDRESSABLE (exp)
           || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
               && (! can_move_by_pieces
                   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
                    TYPE_ALIGN (type)))
               && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
          && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
        return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
        return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
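/* As an illustrative (non-GCC) sketch of the all-zeros early-out above:
   a static aggregate whose initializer is entirely zero never needs
   element-by-element stores, one block clear of the right size suffices.

     void init_all_zeros (void *target, size_t size)
     {
       memset (target, 0, size);   // what clear_storage amounts to here
     }

   Only when the initializer is neither all zeros nor suited to the
   constant pool does store_constructor emit per-field stores.  */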
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl,
                  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
                            inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
                              tree treeop1 ATTRIBUTE_UNUSED,
                              tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;

  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
        return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  expanding_cond_expr_using_cmove = true;
  start_sequence ();
  expand_operands (treeop1, treeop2,
                   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
        comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
                                op00, op01, comparison_mode,
                                op1, op2, mode,
                                unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}
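/* Source-level view of what this helper tries to do; a hypothetical
   example, not GCC code:

     int pick (int a, int b, int c, int d)
     {
       return a < b ? c : d;   // one conditional move, no branch
     }

   On a target with cmove, treeop1/treeop2 are evaluated into op1/op2
   unconditionally and a single conditional move selects between them,
   so both arms must be free of side effects (which gimple guarantees
   for the expressions reaching this point).  */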
static rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
                    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  scalar_int_mode int_mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
                      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (treeop0);

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attributes.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (treeop0, target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, type, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (treeop0,
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM,
                        false, TYPE_REVERSE_STORAGE_ORDER (type));
          else
            {
              gcc_assert (REG_P (target)
                          && !TYPE_REVERSE_STORAGE_ORDER (type));

              /* Store this field into a union of the proper type.  */
              poly_uint64 op0_size
                = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
              poly_uint64 union_size = GET_MODE_BITSIZE (mode);
              store_field (target,
                           /* The conversion must be constructed so that
                              we know at compile time how many bits
                              to preserve.  */
                           ordered_min (op0_size, union_size),
                           0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
                           false, false);
            }

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
        {
          op0 = expand_expr (treeop0, target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (treeop0, NULL_RTX, mode,
                         modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (treeop0);
          machine_mode inner_mode = GET_MODE (op0);

          if (inner_mode == VOIDmode)
            inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = lowpart_subreg (mode, op0, inner_mode);
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
                             ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (treeop0)));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
        tree treeop0_type = TREE_TYPE (treeop0);

        gcc_assert (POINTER_TYPE_P (type));
        gcc_assert (POINTER_TYPE_P (treeop0_type));

        addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
        addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

        /* Conversions between pointers to the same address space should
           have been implemented via CONVERT_EXPR / NOP_EXPR.  */
        gcc_assert (as_to != as_from);

        op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

        /* Ask target code to handle conversion between pointers
           to overlapping address spaces.  */
        if (targetm.addr_space.subset_p (as_to, as_from)
            || targetm.addr_space.subset_p (as_from, as_to))
          {
            op0 = targetm.addr_space.convert (op0, treeop0_type, type);
            gcc_assert (op0);
            return op0;
          }

        /* For disjoint address spaces, converting anything but a null
           pointer invokes undefined behavior.  We truncate or extend the
           value as if we'd converted via integers, which handles 0 as
           required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
        const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
        op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
                             op0, POINTERS_EXTEND_UNSIGNED);
        return op0;
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
         expand is able to handle this correctly and get the correct result out
         of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
         if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type,
                                    fold_convert_loc (loc, ssizetype,
                                                      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
         offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type, treeop1);
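      /* Concretely: on a hypothetical target with 64-bit pointers and a
         32-bit sizetype, an offset whose bit pattern is (unsigned) -4
         must act as -4, not as 0xfffffffc, in the pointer addition:

           char *p_minus_4 (char *p, uint32_t off)  // off == (uint32_t) -4
           {
             return p + (int64_t) (int32_t) off;    // sign-extend, then add
           }

         which is what the round-trip through ssizetype above arranges.  */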
      /* FALLTHRU */
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
          && VAR_P (treeop1)
          && (DECL_RTL (treeop1) == frame_pointer_rtx
              || DECL_RTL (treeop1) == stack_pointer_rtx
              || DECL_RTL (treeop1) == arg_pointer_rtx))
        {
          gcc_unreachable ();
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (treeop0) == INTEGER_CST
              && HWI_COMPUTABLE_MODE_P (mode)
              && TREE_CONSTANT (treeop1))
            {
              rtx constant_part;
              HOST_WIDE_INT wc;
              machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

              op1 = expand_expr (treeop1, subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use wi::shwi to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              wc = TREE_INT_CST_LOW (treeop0);
              constant_part =
                immed_wide_int_const (wi::shwi (wc, wmode), wmode);
              op1 = plus_constant (mode, op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }

          else if (TREE_CODE (treeop1) == INTEGER_CST
                   && HWI_COMPUTABLE_MODE_P (mode)
                   && TREE_CONSTANT (treeop0))
            {
              rtx constant_part;
              HOST_WIDE_INT wc;
              machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

              op0 = expand_expr (treeop0, subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (treeop1, NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use wi::shwi to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              wc = TREE_INT_CST_LOW (treeop1);
              constant_part
                = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
              op0 = plus_constant (mode, op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }

      /* Use TER to expand pointer addition of a negated value
         as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
           || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
               && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
          && TREE_CODE (treeop1) == SSA_NAME
          && TYPE_MODE (TREE_TYPE (treeop0))
             == TYPE_MODE (TREE_TYPE (treeop1)))
        {
          gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
          if (def)
            {
              treeop1 = gimple_assign_rhs1 (def);
              code = MINUS_EXPR;
              goto do_minus;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
          && (modifier != EXPAND_SUM || mode != ptr_mode))
        {
          expand_operands (treeop0, treeop1,
                           subtarget, &op0, &op1, modifier);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
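      /* Under EXPAND_SUM this case may hand back an unfinished address
         instead of emitting an add; e.g. for &arr[10] with 4-byte
         elements the caller can receive (illustratively)

           (plus:DI (symbol_ref:DI ("arr")) (const_int 40))

         and fold it into an addressing mode rather than into a separate
         add instruction.  */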
    case MINUS_EXPR:
    case POINTER_DIFF_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (treeop0)
          && really_constant_p (treeop1))
        {
          expand_operands (treeop0, treeop1,
                           NULL_RTX, &op0, &op1, modifier);
          return simplify_gen_binary (MINUS, mode, op0, op1);
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
          && (modifier != EXPAND_SUM || mode != ptr_mode))
        goto binop;

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
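      /* E.g. x - 7 is emitted as x + (-7): a single add with a negated
         constant, which matches how most targets encode immediates and
         lets later passes combine it with neighboring additions.  */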
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                          target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
         unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
          && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
              != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
        {
          machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
          this_optab = usmul_widen_optab;
          if (find_widening_optab_handler (this_optab, mode, innermode)
              != CODE_FOR_nothing)
            {
              if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
                expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                 EXPAND_NORMAL);
              else
                expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
                                 EXPAND_NORMAL);
              /* op0 and op1 might still be constant, despite the above
                 != INTEGER_CST check.  Handle it.  */
              if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                {
                  op0 = convert_modes (innermode, mode, op0, true);
                  op1 = convert_modes (innermode, mode, op1, false);
                  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                        target, unsignedp));
                }
              goto binop3;
            }
        }
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
                && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
               || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
                   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
        {
          tree op0type = TREE_TYPE (treeop0);
          machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (TREE_CODE (treeop0) != INTEGER_CST)
            {
              if (find_widening_optab_handler (this_optab, mode, innermode)
                  != CODE_FOR_nothing)
                {
                  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                   EXPAND_NORMAL);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    {
                     widen_mult_const:
                      op0 = convert_modes (innermode, mode, op0, zextend_p);
                      op1
                        = convert_modes (innermode, mode, op1,
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                            target,
                                                            unsignedp));
                    }
                  temp = expand_widening_mult (mode, op0, op1, target,
                                               unsignedp, this_optab);
                  return REDUCE_BIT_FIELD (temp);
                }
              if (find_widening_optab_handler (other_optab, mode, innermode)
                  != CODE_FOR_nothing
                  && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_normal (treeop0);
                  if (TREE_CODE (treeop1) == INTEGER_CST)
                    op1 = convert_modes (word_mode, mode,
                                         expand_normal (treeop1),
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                  else
                    op1 = expand_normal (treeop1);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    goto widen_mult_const;
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (word_mode, temp);
                  htem = expand_mult_highpart_adjust (word_mode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_mult" doesn't support sat/no-sat fixed-point
         multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && tree_fits_shwi_p (treeop1))
        {
          tree exp1 = treeop1;

          op0 = expand_expr (treeop0, subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_to_shwi (exp1),
                                             TYPE_MODE (TREE_TYPE (exp1)))));
        }

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
        /* If this is a fixed-point operation, then we cannot use the code
           below because "expand_divmod" doesn't support sat/no-sat fixed-point
           divisions.  */
        if (ALL_FIXED_POINT_MODE_P (mode))
          goto binop;

        if (modifier == EXPAND_STACK_PARM)
          target = 0;
        /* Possible optimization: compute the dividend with EXPAND_SUM
           then if the divisor is constant can optimize the case
           where some terms of the dividend have coeffs divisible by it.  */
        expand_operands (treeop0, treeop1,
                         subtarget, &op0, &op1, EXPAND_NORMAL);
        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
                     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
        if (SCALAR_INT_MODE_P (mode)
            && optimize >= 2
            && get_range_pos_neg (treeop0) == 1
            && get_range_pos_neg (treeop1) == 1)
          {
            /* If both arguments are known to be positive when interpreted
               as signed, we can expand it as both signed and unsigned
               division or modulo.  Choose the cheaper sequence in that case.  */
            bool speed_p = optimize_insn_for_speed_p ();
            do_pending_stack_adjust ();
            start_sequence ();
            rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
            rtx_insn *uns_insns = get_insns ();
            end_sequence ();
            start_sequence ();
            rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
            rtx_insn *sgn_insns = get_insns ();
            end_sequence ();
            unsigned uns_cost = seq_cost (uns_insns, speed_p);
            unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

            /* If costs are the same then use the other factor
               as a tie breaker.  */
            if (uns_cost == sgn_cost)
              {
                uns_cost = seq_cost (uns_insns, !speed_p);
                sgn_cost = seq_cost (sgn_insns, !speed_p);
              }

            if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
              {
                emit_insn (uns_insns);
                return uns_ret;
              }
            emit_insn (sgn_insns);
            return sgn_ret;
          }
        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }
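      /* Worked example of the choice above: with op0 = 100 and op1 = 7,
         both values are known positive when interpreted as signed, so

           100 / 7  ==  100u / 7u   and   100 % 7  ==  100u % 7u

         hold bit-for-bit; either the signed or the unsigned divide
         sequence may be emitted, and only their costs differ.  */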
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
           && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
        expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
        expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type,
                                               optab_default),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
                       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
         and similarly for MAX <x, y>.  */
      if (VECTOR_TYPE_P (type))
        {
          tree t0 = make_tree (type, op0);
          tree t1 = make_tree (type, op1);
          tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
                                    type, t0, t1);
          return expand_vec_cond_expr (type, comparison, t0, t1,
                                       original_target);
        }

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        std::swap (op0, op1);

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }

        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn != NULL_RTX)
              {
                rtx_insn *seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }

        if (target != op0)
          emit_move_insn (target, op0);

        lab = gen_label_rtx ();
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                 unsignedp, mode, NULL_RTX, NULL, lab,
                                 profile_probability::uninitialized ());
      }
      emit_move_insn (target, op1);
      emit_label (lab);
      return target;
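      /* The canonicalizations in the MIN/MAX case above, concretely:
         for MAX_EXPR <a, 1> the generic test would be (a >= 1 ? a : 1);
         comparing against zero instead,

           a >= 1  <==>  a > 0    (signed)
           a >= 1  <==>  a != 0   (unsigned)

         lets the target reuse a cheaper compare-with-zero.  */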
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* In case we have to reduce the result to bitfield precision
         for unsigned bitfield expand this as XOR with a proper constant
         instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
        {
          int_mode = SCALAR_INT_TYPE_MODE (type);
          wide_int mask = wi::mask (TYPE_PRECISION (type),
                                    false, GET_MODE_PRECISION (int_mode));

          temp = expand_binop (int_mode, xor_optab, op0,
                               immed_wide_int_const (mask, int_mode),
                               target, 1, OPTAB_LIB_WIDEN);
        }
      else
        temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
                  || type_has_mode_precision_p (type));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      {
        /* If this is a fixed-point operation, then we cannot use the code
           below because "expand_shift" doesn't support sat/no-sat fixed-point
           shifts.  */
        if (ALL_FIXED_POINT_MODE_P (mode))
          goto binop;

        if (! safe_from_p (subtarget, treeop1, 1))
          subtarget = 0;
        if (modifier == EXPAND_STACK_PARM)
          target = 0;
        op0 = expand_expr (treeop0, subtarget,
                           VOIDmode, EXPAND_NORMAL);
        /* Left shift optimization when shifting across word_size boundary.

           If mode == GET_MODE_WIDER_MODE (word_mode), then normally
           there isn't a native instruction to support this wide-mode
           left shift.  Given the scenario below:

             Type A = (Type) B << C

             | dest_high | dest_low |

           with each half one word wide, if the shift amount C causes
           part of B to cross the word-size boundary -- i.e. part of B
           is shifted into the high half of the destination register
           while part of B remains in the low half -- then GCC uses the
           following left-shift expansion logic:

           1. Initialize dest_low to B.
           2. Initialize every bit of dest_high to the sign bit of B.
           3. Logical left shift dest_low by C bits to finalize dest_low.
              The value of dest_low before this shift is kept in a temp D.
           4. Logical left shift dest_high by C.
           5. Logical right shift D by (word_size - C).
           6. Or the results of 4 and 5 to finalize dest_high.

           But, by checking the gimple statements, if operand B comes
           from a signed extension, then we can simplify the above
           expansion logic into:

             1. dest_high = src_low >> (word_size - C).
             2. dest_low = src_low << C.

           One arithmetic right shift accomplishes everything steps 2,
           4, 5 and 6 did, reducing the number of steps from 6 to 2.

           The case is similar for zero extension, except that we
           initialize dest_high to zero rather than to copies of the
           sign bit of B.  Furthermore, we need to use a logical right
           shift in this case.

           The choice of sign-extension versus zero-extension is
           determined entirely by whether or not B is signed, and is
           independent of the current setting of unsignedp.  */
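        /* A concrete sketch (illustrative C, not target code) with
           32-bit words, a 64-bit destination and a constant 0 < C < 32,
           assuming stdint.h types and arithmetic right shifts of
           signed values, where src_low holds the narrow signed value:

             void wide_lshift (int32_t src_low, unsigned c,
                               uint32_t *dest_high, uint32_t *dest_low)
             {
               *dest_high = (uint32_t) (src_low >> (32 - c)); // step 1
               *dest_low = (uint32_t) src_low << c;           // step 2
             }

           The arithmetic right shift reproduces exactly the bits that
           the six-step sequence would have assembled into dest_high.  */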
        if (code == LSHIFT_EXPR
            && target
            && REG_P (target)
            && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
            && mode == int_mode
            && TREE_CONSTANT (treeop1)
            && TREE_CODE (treeop0) == SSA_NAME)
          {
            gimple *def = SSA_NAME_DEF_STMT (treeop0);
            if (is_gimple_assign (def)
                && gimple_assign_rhs_code (def) == NOP_EXPR)
              {
                scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
                  (TREE_TYPE (gimple_assign_rhs1 (def)));

                if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
                    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
                    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
                        >= GET_MODE_BITSIZE (word_mode)))
                  {
                    rtx_insn *seq, *seq_old;
                    poly_uint64 high_off = subreg_highpart_offset (word_mode,
                                                                   int_mode);
                    bool extend_unsigned
                      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
                    rtx low = lowpart_subreg (word_mode, op0, int_mode);
                    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
                    rtx dest_high = simplify_gen_subreg (word_mode, target,
                                                         int_mode, high_off);
                    HOST_WIDE_INT ramount = (BITS_PER_WORD
                                             - TREE_INT_CST_LOW (treeop1));
                    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

                    start_sequence ();
                    /* dest_high = src_low >> (word_size - C).  */
                    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
                                                  rshift, dest_high,
                                                  extend_unsigned);
                    if (temp != dest_high)
                      emit_move_insn (dest_high, temp);

                    /* dest_low = src_low << C.  */
                    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
                                                  treeop1, dest_low, unsignedp);
                    if (temp != dest_low)
                      emit_move_insn (dest_low, temp);

                    seq = get_insns ();
                    end_sequence ();
                    temp = target;

                    if (have_insn_for (ASHIFT, int_mode))
                      {
                        bool speed_p = optimize_insn_for_speed_p ();
                        start_sequence ();
                        rtx ret_old = expand_variable_shift (code, int_mode,
                                                             op0, treeop1,
                                                             target,
                                                             unsignedp);

                        seq_old = get_insns ();
                        end_sequence ();
                        if (seq_cost (seq, speed_p)
                            >= seq_cost (seq_old, speed_p))
                          {
                            seq = seq_old;
                            temp = ret_old;
                          }
                      }
                    emit_insn (seq);
                    return REDUCE_BIT_FIELD (temp);
                  }
              }
          }

        if (temp == NULL_RTX)
          temp = expand_variable_shift (code, mode, op0, treeop1, target,
                                        unsignedp);
        if (code == LSHIFT_EXPR)
          temp = REDUCE_BIT_FIELD (temp);
        return temp;
      }
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
        temp = do_store_flag (ops,
                              modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                              tmode != VOIDmode ? tmode : mode);
        if (temp)
          return temp;

        /* Use a compare and a jump for BLKmode comparisons, or for function
           type comparisons if we have_canonicalize_funcptr_for_compare.  */

        if ((target == 0
             || modifier == EXPAND_STACK_PARM
             || ! safe_from_p (target, treeop0, 1)
             || ! safe_from_p (target, treeop1, 1)
             /* Make sure we don't have a hard reg (such as function's return
                value) live across basic blocks, if not optimizing.  */
             || (!optimize && REG_P (target)
                 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        emit_move_insn (target, const0_rtx);

        rtx_code_label *lab1 = gen_label_rtx ();
        jumpifnot_1 (code, treeop0, treeop1, lab1,
                     profile_probability::uninitialized ());

        if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
          emit_move_insn (target, constm1_rtx);
        else
          emit_move_insn (target, const1_rtx);

        emit_label (lab1);
        return target;
      }
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (type));
      else
        /* If target overlaps with op1, then either we need to force
           op1 into a pseudo (if target also overlaps with op0),
           or write the complex parts in reverse order.  */
        switch (GET_CODE (target))
          {
          case CONCAT:
            if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
              {
                if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
                  {
                  complex_expr_force_op1:
                    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
                    emit_move_insn (temp, op1);
                    op1 = temp;
                    break;
                  }
              complex_expr_swap_order:
                /* Move the imaginary (op1) and real (op0) parts to their
                   location.  */
                write_complex_part (target, op1, true);
                write_complex_part (target, op0, false);

                return target;
              }
            break;
          case MEM:
            temp = adjust_address_nv (target,
                                      GET_MODE_INNER (GET_MODE (target)), 0);
            if (reg_overlap_mentioned_p (temp, op1))
              {
                scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
                temp = adjust_address_nv (target, imode,
                                          GET_MODE_SIZE (imode));
                if (reg_overlap_mentioned_p (temp, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          default:
            if (reg_overlap_mentioned_p (target, op1))
              {
                if (reg_overlap_mentioned_p (target, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);
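      /* Concretely: if TARGET is (concat:DC R1 R2) and OP1 currently
         lives in R1, storing the real part into R1 first would destroy
         OP1; writing the imaginary part first (swap-order), or copying
         OP1 to a fresh pseudo (force-op1), avoids the clobber.  */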
    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
                                          target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        /* The signedness is determined from input operand.  */
        temp = expand_widen_pattern_expr
          (ops, op0, NULL_RTX, NULL_RTX,
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

        gcc_assert (temp);
        return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
                                          target, unsignedp);
      gcc_assert (target);
      return target;
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PACK_TRUNC_EXPR:
      if (VECTOR_BOOLEAN_TYPE_P (type)
          && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
          && mode == TYPE_MODE (TREE_TYPE (treeop0))
          && SCALAR_INT_MODE_P (mode))
        {
          struct expand_operand eops[4];
          machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
          expand_operands (treeop0, treeop1,
                           subtarget, &op0, &op1, EXPAND_NORMAL);
          this_optab = vec_pack_sbool_trunc_optab;
          enum insn_code icode = optab_handler (this_optab, imode);
          create_output_operand (&eops[0], target, mode);
          create_convert_operand_from (&eops[1], op0, imode, false);
          create_convert_operand_from (&eops[2], op1, imode, false);
          temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
          create_input_operand (&eops[3], temp, imode);
          expand_insn (icode, 4, eops);
          return eops[0].value;
        }
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;
    case VEC_PACK_FLOAT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, EXPAND_NORMAL);
      this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
                                        optab_default);
      target = expand_binop (mode, this_optab, op0, op1, target,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)),
                             OPTAB_LIB_WIDEN);
      gcc_assert (target);
      return target;

    case VEC_PERM_EXPR:
      {
        expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
        vec_perm_builder sel;
        if (TREE_CODE (treeop2) == VECTOR_CST
            && tree_to_vec_perm_builder (&sel, treeop2))
          {
            machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
            temp = expand_vec_perm_const (mode, op0, op1, sel,
                                          sel_mode, target);
          }
        else
          {
            op2 = expand_normal (treeop2);
            temp = expand_vec_perm_var (mode, op0, op1, op2, target);
          }
        gcc_assert (temp);
        return temp;
      }
    case DOT_PROD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }

    case SAD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }

    case REALIGN_LOAD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        this_optab = optab_for_tree_code (code, type, optab_default);
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                  target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case COND_EXPR:
      {
        /* A COND_EXPR with its type being VOID_TYPE represents a
           conditional jump and is handled in
           expand_gimple_cond_expr.  */
        gcc_assert (!VOID_TYPE_P (type));

        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        gcc_assert (!TREE_ADDRESSABLE (type)
                    && !ignore
                    && TREE_TYPE (treeop1) != void_type_node
                    && TREE_TYPE (treeop2) != void_type_node);

        temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
        if (temp)
          return temp;

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (modifier != EXPAND_STACK_PARM
            && original_target
            && safe_from_p (original_target, treeop0, 1)
            && GET_MODE (original_target) == mode
            && !MEM_P (original_target))
          temp = original_target;
        else
          temp = assign_temp (type, 0, 1);

        do_pending_stack_adjust ();
        NO_DEFER_POP;
        rtx_code_label *lab0 = gen_label_rtx ();
        rtx_code_label *lab1 = gen_label_rtx ();
        jumpifnot (treeop0, lab0,
                   profile_probability::uninitialized ());
        store_expr (treeop1, temp,
                    modifier == EXPAND_STACK_PARM,
                    false, false);

        emit_jump_insn (targetm.gen_jump (lab1));
        emit_barrier ();
        emit_label (lab0);
        store_expr (treeop2, temp,
                    modifier == EXPAND_STACK_PARM,
                    false, false);

        emit_label (lab1);
        OK_DEFER_POP;
        return temp;
      }
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    case VEC_DUPLICATE_EXPR:
      op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      target = expand_vector_broadcast (mode, op0);
      gcc_assert (target);
      return target;

    case VEC_SERIES_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
      return expand_vec_series_expr (mode, op0, op1, target);

    case BIT_INSERT_EXPR:
      {
        unsigned bitpos = tree_to_uhwi (treeop2);
        unsigned bitsize;
        if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
          bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
        else
          bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
        rtx op0 = expand_normal (treeop0);
        rtx op1 = expand_normal (treeop1);
        rtx dst = gen_reg_rtx (mode);
        emit_move_insn (dst, op0);
        store_bit_field (dst, bitsize, bitpos, 0, 0,
                         TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
        return dst;
      }

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
                   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
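/* What REDUCE_BIT_FIELD arranges, in plain C terms: for a type whose
   precision PREC is narrower than its mode, the result is renarrowed
   as if by (illustrative only, assuming 0 < prec < 64)

     uint64_t reduce_unsigned (uint64_t x, int prec)
     {
       return x & (((uint64_t) 1 << prec) - 1);   // mask to PREC bits
     }

   with a shift-left/arithmetic-shift-right pair used instead for
   signed types, which is what reduce_to_bit_field_precision emits.  */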
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
          || is_gimple_val (gimple_assign_rhs1 (stmt)))
        return true;
    }

  return false;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl,
                    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
      case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
      case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
      case 0: break;
      }
  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
                      && INTEGRAL_TYPE_P (type)
                      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == BIT_FIELD_REF
          || code == COMPONENT_REF
          || code == INDIRECT_REF)
        return expand_expr (treeop0, const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
         out-of-ssa.  So fake instructions as if this was an access to the
         base variable.  This unnecessarily allocates a pseudo, see how we can
         reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
        {
          tree var = SSA_NAME_VAR (exp);
          if (var && DECL_RTL_SET_P (var))
            return DECL_RTL (var);
          return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
                              LAST_VIRTUAL_REGISTER + 1);
        }

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
          && modifier == EXPAND_INITIALIZER
          && !SSA_NAME_IS_DEFAULT_DEF (exp)
          && (optimize || !SSA_NAME_VAR (exp)
              || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
          && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
        g = SSA_NAME_DEF_STMT (exp);
      if (g)
        {
          rtx r;
          location_t saved_loc = curr_insn_location ();
          location_t loc = gimple_location (g);
          if (loc != UNKNOWN_LOCATION)
            set_curr_insn_location (loc);
          ops.code = gimple_assign_rhs_code (g);
          switch (get_gimple_rhs_class (ops.code))
            {
            case GIMPLE_TERNARY_RHS:
              ops.op2 = gimple_assign_rhs3 (g);
              /* Fallthru */
            case GIMPLE_BINARY_RHS:
              ops.op1 = gimple_assign_rhs2 (g);

              /* Try to expand conditional compare.  */
              if (targetm.gen_ccmp_first)
                {
                  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
                  r = expand_ccmp_expr (g, mode);
                  if (r)
                    break;
                }
              /* Fallthru */
            case GIMPLE_UNARY_RHS:
              ops.op0 = gimple_assign_rhs1 (g);
              ops.type = TREE_TYPE (gimple_assign_lhs (g));
              ops.location = loc;
              r = expand_expr_real_2 (&ops, target, tmode, modifier);
              break;
            case GIMPLE_SINGLE_RHS:
              {
                r = expand_expr_real (gimple_assign_rhs1 (g), target,
                                      tmode, modifier, alt_rtl,
                                      inner_reference_p);
                break;
              }
            default:
              gcc_unreachable ();
            }
          set_curr_insn_location (saved_loc);
          if (REG_P (r) && !REG_EXPR (r))
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
          return r;
        }

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
9929 /* If a static var's type was incomplete when the decl was written,
9930 but the type is complete now, lay out the decl now. */
9931 if (DECL_SIZE (exp
) == 0
9932 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9933 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9934 layout_decl (exp
, 0);
9940 decl_rtl
= DECL_RTL (exp
);
9942 gcc_assert (decl_rtl
);
9944 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9945 settings for VECTOR_TYPE_P that might switch for the function. */
9946 if (currently_expanding_to_rtl
9947 && code
== VAR_DECL
&& MEM_P (decl_rtl
)
9948 && VECTOR_TYPE_P (type
) && exp
&& DECL_MODE (exp
) != mode
)
9949 decl_rtl
= change_address (decl_rtl
, TYPE_MODE (type
), 0);
9951 decl_rtl
= copy_rtx (decl_rtl
);
9953 /* Record writes to register variables. */
9954 if (modifier
== EXPAND_WRITE
9956 && HARD_REGISTER_P (decl_rtl
))
9957 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9958 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
9960 /* Ensure variable marked as used even if it doesn't go through
9961 a parser. If it hasn't be used yet, write out an external
9964 TREE_USED (exp
) = 1;
9966 /* Show we haven't gotten RTL for this yet. */
9969 /* Variables inherited from containing functions should have
9970 been lowered by this point. */
9972 context
= decl_function_context (exp
);
9974 || SCOPE_FILE_SCOPE_P (context
)
9975 || context
== current_function_decl
9976 || TREE_STATIC (exp
)
9977 || DECL_EXTERNAL (exp
)
9978 /* ??? C++ creates functions that are not TREE_STATIC. */
9979 || TREE_CODE (exp
) == FUNCTION_DECL
);
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */
      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);
      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
					       : GET_MODE (decl_rtl),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      if (exp)
	dmode = DECL_MODE (exp);
      else
	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && dmode != BLKmode
	  && GET_MODE (decl_rtl) != dmode)
	{
	  machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code != SSA_NAME)
	    pmode = promote_decl_mode (exp, &unsignedp);
	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
		   && gimple_code (g) == GIMPLE_CALL
		   && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
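      /* For illustration: on a target whose PROMOTE_MODE widens QImode
	 values to SImode registers, a QImode variable whose DECL_RTL is
	 (reg:SI N) is returned here as (subreg:QI (reg:SI N) 0) with
	 SUBREG_PROMOTED_VAR_P set, so consumers know the upper bits are
	 already extended and can skip a redundant extension.  */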
    case INTEGER_CST:
      {
	/* Given that TYPE_PRECISION (type) is not always equal to
	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	   the former to the latter according to the signedness of the
	   type.  */
	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	temp = immed_wide_int_const
	  (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
	return temp;
      }
    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (VECTOR_MODE_P (mode))
	  return const_vector_from_tree (exp);
	scalar_int_mode int_mode;
	if (is_int_mode (mode, &int_mode))
	  {
	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
	      return const_scalar_mask_from_tree (int_mode, exp);
	    else
	      {
		tree type_for_mode
		  = lang_hooks.types.type_for_mode (int_mode, 1);
		if (type_for_mode)
		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
					type_for_mode, exp);
	      }
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    /* Constructors need to be fixed-length.  FIXME.  */
	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
	    vec_alloc (v, nunits);
	    for (unsigned int i = 0; i < nunits; ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }
    case CONST_DECL:
      if (modifier == EXPAND_WRITE)
	{
	  /* Writing into CONST_DECL is always invalid, but handle it
	     gracefully.  */
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
					 EXPAND_NORMAL, as);
	  op0 = memory_address_addr_space (mode, op0, as);
	  temp = gen_rtx_MEM (mode, op0);
	  set_mem_addr_space (temp, as);
	  return temp;
	}
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return const_double_from_real_value (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* fall through */
    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
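	/* For example, a 4-byte integer load from a packed structure on a
	   strict-alignment target is routed through movmisalign here when
	   the target provides that pattern; otherwise the plain move
	   patterns are used, per the comment above.  */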
10260 const bool reverse
= REF_REVERSE_STORAGE_ORDER (exp
);
10262 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
10263 machine_mode address_mode
;
10264 tree base
= TREE_OPERAND (exp
, 0);
10266 enum insn_code icode
;
10268 /* Handle expansion of non-aliased memory with non-BLKmode. That
10269 might end up in a register. */
10270 if (mem_ref_refers_to_non_mem_p (exp
))
10272 poly_int64 offset
= mem_ref_offset (exp
).force_shwi ();
10273 base
= TREE_OPERAND (base
, 0);
10274 poly_uint64 type_size
;
10275 if (known_eq (offset
, 0)
10277 && poly_int_tree_p (TYPE_SIZE (type
), &type_size
)
10278 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base
)), type_size
))
10279 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
10280 target
, tmode
, modifier
);
10281 if (TYPE_MODE (type
) == BLKmode
)
10283 temp
= assign_stack_temp (DECL_MODE (base
),
10284 GET_MODE_SIZE (DECL_MODE (base
)));
10285 store_expr (base
, temp
, 0, false, false);
10286 temp
= adjust_address (temp
, BLKmode
, offset
);
10287 set_mem_size (temp
, int_size_in_bytes (type
));
10290 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
10291 bitsize_int (offset
* BITS_PER_UNIT
));
10292 REF_REVERSE_STORAGE_ORDER (exp
) = reverse
;
10293 return expand_expr (exp
, target
, tmode
, modifier
);
10295 address_mode
= targetm
.addr_space
.address_mode (as
);
10296 base
= TREE_OPERAND (exp
, 0);
10297 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
10299 tree mask
= gimple_assign_rhs2 (def_stmt
);
10300 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
10301 gimple_assign_rhs1 (def_stmt
), mask
);
10302 TREE_OPERAND (exp
, 0) = base
;
10304 align
= get_object_alignment (exp
);
10305 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
10306 op0
= memory_address_addr_space (mode
, op0
, as
);
10307 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
10309 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
10310 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
10311 op0
= memory_address_addr_space (mode
, op0
, as
);
10313 temp
= gen_rtx_MEM (mode
, op0
);
10314 set_mem_attributes (temp
, exp
, 0);
10315 set_mem_addr_space (temp
, as
);
10316 if (TREE_THIS_VOLATILE (exp
))
10317 MEM_VOLATILE_P (temp
) = 1;
10318 if (modifier
!= EXPAND_WRITE
10319 && modifier
!= EXPAND_MEMORY
10320 && !inner_reference_p
10322 && align
< GET_MODE_ALIGNMENT (mode
))
10324 if ((icode
= optab_handler (movmisalign_optab
, mode
))
10325 != CODE_FOR_nothing
)
10327 struct expand_operand ops
[2];
10329 /* We've already validated the memory, and we're creating a
10330 new pseudo destination. The predicates really can't fail,
10331 nor can the generator. */
10332 create_output_operand (&ops
[0], NULL_RTX
, mode
);
10333 create_fixed_operand (&ops
[1], temp
);
10334 expand_insn (icode
, 2, ops
);
10335 temp
= ops
[0].value
;
10337 else if (targetm
.slow_unaligned_access (mode
, align
))
10338 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
10339 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
10340 (modifier
== EXPAND_STACK_PARM
10341 ? NULL_RTX
: target
),
10342 mode
, mode
, false, alt_rtl
);
10345 && modifier
!= EXPAND_MEMORY
10346 && modifier
!= EXPAND_WRITE
)
10347 temp
= flip_storage_order (mode
, temp
);
10354 tree array
= treeop0
;
10355 tree index
= treeop1
;
10358 /* Fold an expression like: "foo"[2].
10359 This is not done in fold so it won't happen inside &.
10360 Don't fold if this is for wide characters since it's too
10361 difficult to do correctly and this is a very rare case. */
10363 if (modifier
!= EXPAND_CONST_ADDRESS
10364 && modifier
!= EXPAND_INITIALIZER
10365 && modifier
!= EXPAND_MEMORY
)
10367 tree t
= fold_read_from_constant_string (exp
);
10370 return expand_expr (t
, target
, tmode
, modifier
);
10373 /* If this is a constant index into a constant array,
10374 just get the value from the array. Handle both the cases when
10375 we have an explicit constructor and when our operand is a variable
10376 that was declared const. */
10378 if (modifier
!= EXPAND_CONST_ADDRESS
10379 && modifier
!= EXPAND_INITIALIZER
10380 && modifier
!= EXPAND_MEMORY
10381 && TREE_CODE (array
) == CONSTRUCTOR
10382 && ! TREE_SIDE_EFFECTS (array
)
10383 && TREE_CODE (index
) == INTEGER_CST
)
10385 unsigned HOST_WIDE_INT ix
;
10388 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
10390 if (tree_int_cst_equal (field
, index
))
10392 if (!TREE_SIDE_EFFECTS (value
))
10393 return expand_expr (fold (value
), target
, tmode
, modifier
);
10398 else if (optimize
>= 1
10399 && modifier
!= EXPAND_CONST_ADDRESS
10400 && modifier
!= EXPAND_INITIALIZER
10401 && modifier
!= EXPAND_MEMORY
10402 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
10403 && TREE_CODE (index
) == INTEGER_CST
10404 && (VAR_P (array
) || TREE_CODE (array
) == CONST_DECL
)
10405 && (init
= ctor_for_folding (array
)) != error_mark_node
)
10407 if (init
== NULL_TREE
)
10409 tree value
= build_zero_cst (type
);
10410 if (TREE_CODE (value
) == CONSTRUCTOR
)
10412 /* If VALUE is a CONSTRUCTOR, this optimization is only
10413 useful if this doesn't store the CONSTRUCTOR into
10414 memory. If it does, it is more efficient to just
10415 load the data from the array directly. */
10416 rtx ret
= expand_constructor (value
, target
,
10418 if (ret
== NULL_RTX
)
10423 return expand_expr (value
, target
, tmode
, modifier
);
10425 else if (TREE_CODE (init
) == CONSTRUCTOR
)
10427 unsigned HOST_WIDE_INT ix
;
10430 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
10432 if (tree_int_cst_equal (field
, index
))
10434 if (TREE_SIDE_EFFECTS (value
))
10437 if (TREE_CODE (value
) == CONSTRUCTOR
)
10439 /* If VALUE is a CONSTRUCTOR, this
10440 optimization is only useful if
10441 this doesn't store the CONSTRUCTOR
10442 into memory. If it does, it is more
10443 efficient to just load the data from
10444 the array directly. */
10445 rtx ret
= expand_constructor (value
, target
,
10447 if (ret
== NULL_RTX
)
10452 expand_expr (fold (value
), target
, tmode
, modifier
);
10455 else if (TREE_CODE (init
) == STRING_CST
)
10457 tree low_bound
= array_ref_low_bound (exp
);
10458 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
	    /* Optimize the special case of a zero lower bound.

	       We convert the lower bound to sizetype to avoid problems
	       with constant folding.  E.g. suppose the lower bound is
	       1 and its mode is QI.  Without the conversion
		  (ARRAY + (INDEX - (unsigned char)1))
	       becomes
		  (ARRAY + (-(unsigned char)1) + INDEX)
	       which becomes
		  (ARRAY + 255 + INDEX).  Oops!  */
10470 if (!integer_zerop (low_bound
))
10471 index1
= size_diffop_loc (loc
, index1
,
10472 fold_convert_loc (loc
, sizetype
,
10475 if (tree_fits_uhwi_p (index1
)
10476 && compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
10478 tree type
= TREE_TYPE (TREE_TYPE (init
));
10479 scalar_int_mode mode
;
10481 if (is_int_mode (TYPE_MODE (type
), &mode
)
10482 && GET_MODE_SIZE (mode
) == 1)
10483 return gen_int_mode (TREE_STRING_POINTER (init
)
10484 [TREE_INT_CST_LOW (index1
)],
10490 goto normal_inner_ref
;
10492 case COMPONENT_REF
:
10493 /* If the operand is a CONSTRUCTOR, we can just extract the
10494 appropriate field if it is present. */
10495 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
10497 unsigned HOST_WIDE_INT idx
;
10499 scalar_int_mode field_mode
;
10501 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
10503 if (field
== treeop1
10504 /* We can normally use the value of the field in the
10505 CONSTRUCTOR. However, if this is a bitfield in
10506 an integral mode that we can fit in a HOST_WIDE_INT,
10507 we must mask only the number of bits in the bitfield,
10508 since this is done implicitly by the constructor. If
10509 the bitfield does not meet either of those conditions,
10510 we can't do this optimization. */
10511 && (! DECL_BIT_FIELD (field
)
10512 || (is_int_mode (DECL_MODE (field
), &field_mode
)
10513 && (GET_MODE_PRECISION (field_mode
)
10514 <= HOST_BITS_PER_WIDE_INT
))))
10516 if (DECL_BIT_FIELD (field
)
10517 && modifier
== EXPAND_STACK_PARM
)
10519 op0
= expand_expr (value
, target
, tmode
, modifier
);
10520 if (DECL_BIT_FIELD (field
))
10522 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10523 scalar_int_mode imode
10524 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field
));
10526 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10528 op1
= gen_int_mode ((HOST_WIDE_INT_1
<< bitsize
) - 1,
10530 op0
= expand_and (imode
, op0
, op1
, target
);
10534 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10536 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10538 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10546 goto normal_inner_ref
;
10548 case BIT_FIELD_REF
:
10549 case ARRAY_RANGE_REF
:
10552 machine_mode mode1
, mode2
;
10553 poly_int64 bitsize
, bitpos
, bytepos
;
10555 int reversep
, volatilep
= 0, must_force_mem
;
10557 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
10558 &unsignedp
, &reversep
, &volatilep
);
10559 rtx orig_op0
, memloc
;
10560 bool clear_mem_expr
= false;
10562 /* If we got back the original object, something is wrong. Perhaps
10563 we are evaluating an expression too early. In any event, don't
10564 infinitely recurse. */
10565 gcc_assert (tem
!= exp
);
	/* If TEM's type is a union of variable size, pass TARGET to the
	   inner computation, since it will need a temporary and TARGET
	   is known to suffice.  This occurs in unchecked conversion
	   in Ada.  */
10571 = expand_expr_real (tem
,
10572 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10573 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10574 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10576 && modifier
!= EXPAND_STACK_PARM
10577 ? target
: NULL_RTX
),
10579 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10582 /* If the field has a mode, we want to access it in the
10583 field's mode, not the computed mode.
10584 If a MEM has VOIDmode (external with incomplete type),
10585 use BLKmode for it instead. */
10588 if (mode1
!= VOIDmode
)
10589 op0
= adjust_address (op0
, mode1
, 0);
10590 else if (GET_MODE (op0
) == VOIDmode
)
10591 op0
= adjust_address (op0
, BLKmode
, 0);
10595 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10597 /* Make sure bitpos is not negative, it can wreak havoc later. */
10598 if (maybe_lt (bitpos
, 0))
10600 gcc_checking_assert (offset
== NULL_TREE
);
10601 offset
= size_int (bits_to_bytes_round_down (bitpos
));
10602 bitpos
= num_trailing_bits (bitpos
);
10605 /* If we have either an offset, a BLKmode result, or a reference
10606 outside the underlying object, we must force it to memory.
10607 Such a case can occur in Ada if we have unchecked conversion
10608 of an expression from a scalar type to an aggregate type or
10609 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10610 passed a partially uninitialized object or a view-conversion
10611 to a larger size. */
10612 must_force_mem
= (offset
10613 || mode1
== BLKmode
10614 || (mode
== BLKmode
10615 && !int_mode_for_size (bitsize
, 1).exists ())
10616 || maybe_gt (bitpos
+ bitsize
,
10617 GET_MODE_BITSIZE (mode2
)));
10619 /* Handle CONCAT first. */
10620 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10622 if (known_eq (bitpos
, 0)
10623 && known_eq (bitsize
, GET_MODE_BITSIZE (GET_MODE (op0
)))
10624 && COMPLEX_MODE_P (mode1
)
10625 && COMPLEX_MODE_P (GET_MODE (op0
))
10626 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1
))
10627 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0
)))))
10630 op0
= flip_storage_order (GET_MODE (op0
), op0
);
10631 if (mode1
!= GET_MODE (op0
))
10634 for (int i
= 0; i
< 2; i
++)
10636 rtx op
= read_complex_part (op0
, i
!= 0);
10637 if (GET_CODE (op
) == SUBREG
)
10638 op
= force_reg (GET_MODE (op
), op
);
10639 rtx temp
= gen_lowpart_common (GET_MODE_INNER (mode1
),
10645 if (!REG_P (op
) && !MEM_P (op
))
10646 op
= force_reg (GET_MODE (op
), op
);
10647 op
= gen_lowpart (GET_MODE_INNER (mode1
), op
);
10651 op0
= gen_rtx_CONCAT (mode1
, parts
[0], parts
[1]);
10655 if (known_eq (bitpos
, 0)
10656 && known_eq (bitsize
,
10657 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))))
10658 && maybe_ne (bitsize
, 0))
10660 op0
= XEXP (op0
, 0);
10661 mode2
= GET_MODE (op0
);
10663 else if (known_eq (bitpos
,
10664 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))))
10665 && known_eq (bitsize
,
10666 GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1))))
10667 && maybe_ne (bitpos
, 0)
10668 && maybe_ne (bitsize
, 0))
10670 op0
= XEXP (op0
, 1);
10672 mode2
= GET_MODE (op0
);
10675 /* Otherwise force into memory. */
10676 must_force_mem
= 1;
10679 /* If this is a constant, put it in a register if it is a legitimate
10680 constant and we don't need a memory reference. */
10681 if (CONSTANT_P (op0
)
10682 && mode2
!= BLKmode
10683 && targetm
.legitimate_constant_p (mode2
, op0
)
10684 && !must_force_mem
)
10685 op0
= force_reg (mode2
, op0
);
10687 /* Otherwise, if this is a constant, try to force it to the constant
10688 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10689 is a legitimate constant. */
10690 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10691 op0
= validize_mem (memloc
);
10693 /* Otherwise, if this is a constant or the object is not in memory
10694 and need be, put it there. */
10695 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10697 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10698 emit_move_insn (memloc
, op0
);
10700 clear_mem_expr
= true;
10705 machine_mode address_mode
;
10706 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
10709 gcc_assert (MEM_P (op0
));
10711 address_mode
= get_address_mode (op0
);
10712 if (GET_MODE (offset_rtx
) != address_mode
)
10714 /* We cannot be sure that the RTL in offset_rtx is valid outside
10715 of a memory address context, so force it into a register
10716 before attempting to convert it to the desired mode. */
10717 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
10718 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
10721 /* See the comment in expand_assignment for the rationale. */
10722 if (mode1
!= VOIDmode
10723 && maybe_ne (bitpos
, 0)
10724 && maybe_gt (bitsize
, 0)
10725 && multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
10726 && multiple_p (bitpos
, bitsize
)
10727 && multiple_p (bitsize
, GET_MODE_ALIGNMENT (mode1
))
10728 && MEM_ALIGN (op0
) >= GET_MODE_ALIGNMENT (mode1
))
10730 op0
= adjust_address (op0
, mode1
, bytepos
);
10734 op0
= offset_address (op0
, offset_rtx
,
10735 highest_pow2_factor (offset
));
10738 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10739 record its alignment as BIGGEST_ALIGNMENT. */
10741 && known_eq (bitpos
, 0)
10743 && is_aligning_offset (offset
, tem
))
10744 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
10746 /* Don't forget about volatility even if this is a bitfield. */
10747 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
10749 if (op0
== orig_op0
)
10750 op0
= copy_rtx (op0
);
10752 MEM_VOLATILE_P (op0
) = 1;
10755 /* In cases where an aligned union has an unaligned object
10756 as a field, we might be extracting a BLKmode value from
10757 an integer-mode (e.g., SImode) object. Handle this case
10758 by doing the extract into an object as wide as the field
10759 (which we know to be the width of a basic mode), then
10760 storing into memory, and changing the mode to BLKmode. */
10761 if (mode1
== VOIDmode
10762 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
10763 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
10764 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
10765 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
10766 && modifier
!= EXPAND_CONST_ADDRESS
10767 && modifier
!= EXPAND_INITIALIZER
10768 && modifier
!= EXPAND_MEMORY
)
10769 /* If the bitfield is volatile and the bitsize
10770 is narrower than the access size of the bitfield,
10771 we need to extract bitfields from the access. */
10772 || (volatilep
&& TREE_CODE (exp
) == COMPONENT_REF
10773 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp
, 1))
10774 && mode1
!= BLKmode
10775 && maybe_lt (bitsize
, GET_MODE_SIZE (mode1
) * BITS_PER_UNIT
))
10776 /* If the field isn't aligned enough to fetch as a memref,
10777 fetch it as a bit field. */
10778 || (mode1
!= BLKmode
10780 ? MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
10781 || !multiple_p (bitpos
, GET_MODE_ALIGNMENT (mode1
))
10782 : TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
10783 || !multiple_p (bitpos
, GET_MODE_ALIGNMENT (mode
)))
10784 && modifier
!= EXPAND_MEMORY
10785 && ((modifier
== EXPAND_CONST_ADDRESS
10786 || modifier
== EXPAND_INITIALIZER
)
10788 : targetm
.slow_unaligned_access (mode1
,
10790 || !multiple_p (bitpos
, BITS_PER_UNIT
)))
10791 /* If the type and the field are a constant size and the
10792 size of the type isn't the same size as the bitfield,
10793 we must use bitfield operations. */
10794 || (known_size_p (bitsize
)
10795 && TYPE_SIZE (TREE_TYPE (exp
))
10796 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp
)))
10797 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp
))),
10800 machine_mode ext_mode
= mode
;
10802 if (ext_mode
== BLKmode
10803 && ! (target
!= 0 && MEM_P (op0
)
10805 && multiple_p (bitpos
, BITS_PER_UNIT
)))
10806 ext_mode
= int_mode_for_size (bitsize
, 1).else_blk ();
10808 if (ext_mode
== BLKmode
)
10811 target
= assign_temp (type
, 1, 1);
10813 /* ??? Unlike the similar test a few lines below, this one is
10814 very likely obsolete. */
10815 if (known_eq (bitsize
, 0))
10818 /* In this case, BITPOS must start at a byte boundary and
10819 TARGET, if specified, must be a MEM. */
10820 gcc_assert (MEM_P (op0
)
10821 && (!target
|| MEM_P (target
)));
10823 bytepos
= exact_div (bitpos
, BITS_PER_UNIT
);
10824 poly_int64 bytesize
= bits_to_bytes_round_up (bitsize
);
10825 emit_block_move (target
,
10826 adjust_address (op0
, VOIDmode
, bytepos
),
10827 gen_int_mode (bytesize
, Pmode
),
10828 (modifier
== EXPAND_STACK_PARM
10829 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
10834 /* If we have nothing to extract, the result will be 0 for targets
10835 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10836 return 0 for the sake of consistency, as reading a zero-sized
10837 bitfield is valid in Ada and the value is fully specified. */
10838 if (known_eq (bitsize
, 0))
10841 op0
= validize_mem (op0
);
10843 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
10844 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10846 /* If the result has a record type and the extraction is done in
10847 an integral mode, then the field may be not aligned on a byte
10848 boundary; in this case, if it has reverse storage order, it
10849 needs to be extracted as a scalar field with reverse storage
10850 order and put back into memory order afterwards. */
10851 if (TREE_CODE (type
) == RECORD_TYPE
10852 && GET_MODE_CLASS (ext_mode
) == MODE_INT
)
10853 reversep
= TYPE_REVERSE_STORAGE_ORDER (type
);
10855 gcc_checking_assert (known_ge (bitpos
, 0));
10856 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
10857 (modifier
== EXPAND_STACK_PARM
10858 ? NULL_RTX
: target
),
10859 ext_mode
, ext_mode
, reversep
, alt_rtl
);
10861 /* If the result has a record type and the mode of OP0 is an
10862 integral mode then, if BITSIZE is narrower than this mode
10863 and this is for big-endian data, we must put the field
10864 into the high-order bits. And we must also put it back
10865 into memory order if it has been previously reversed. */
10866 scalar_int_mode op0_mode
;
10867 if (TREE_CODE (type
) == RECORD_TYPE
10868 && is_int_mode (GET_MODE (op0
), &op0_mode
))
10870 HOST_WIDE_INT size
= GET_MODE_BITSIZE (op0_mode
);
10872 gcc_checking_assert (known_le (bitsize
, size
));
10873 if (maybe_lt (bitsize
, size
)
10874 && reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
10875 op0
= expand_shift (LSHIFT_EXPR
, op0_mode
, op0
,
10876 size
- bitsize
, op0
, 1);
10879 op0
= flip_storage_order (op0_mode
, op0
);
10882 /* If the result type is BLKmode, store the data into a temporary
10883 of the appropriate type, but with the mode corresponding to the
10884 mode for the data we have (op0's mode). */
10885 if (mode
== BLKmode
)
10888 = assign_stack_temp_for_type (ext_mode
,
10889 GET_MODE_BITSIZE (ext_mode
),
10891 emit_move_insn (new_rtx
, op0
);
10892 op0
= copy_rtx (new_rtx
);
10893 PUT_MODE (op0
, BLKmode
);
10899 /* If the result is BLKmode, use that to access the object
10901 if (mode
== BLKmode
)
10904 /* Get a reference to just this component. */
10905 bytepos
= bits_to_bytes_round_down (bitpos
);
10906 if (modifier
== EXPAND_CONST_ADDRESS
10907 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
10908 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
10910 op0
= adjust_address (op0
, mode1
, bytepos
);
10912 if (op0
== orig_op0
)
10913 op0
= copy_rtx (op0
);
10915 /* Don't set memory attributes if the base expression is
10916 SSA_NAME that got expanded as a MEM. In that case, we should
10917 just honor its original memory attributes. */
10918 if (TREE_CODE (tem
) != SSA_NAME
|| !MEM_P (orig_op0
))
10919 set_mem_attributes (op0
, exp
, 0);
10921 if (REG_P (XEXP (op0
, 0)))
10922 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10924 /* If op0 is a temporary because the original expressions was forced
10925 to memory, clear MEM_EXPR so that the original expression cannot
10926 be marked as addressable through MEM_EXPR of the temporary. */
10927 if (clear_mem_expr
)
10928 set_mem_expr (op0
, NULL_TREE
);
10930 MEM_VOLATILE_P (op0
) |= volatilep
;
10933 && modifier
!= EXPAND_MEMORY
10934 && modifier
!= EXPAND_WRITE
)
10935 op0
= flip_storage_order (mode1
, op0
);
10937 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
10938 || modifier
== EXPAND_CONST_ADDRESS
10939 || modifier
== EXPAND_INITIALIZER
)
10943 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
10945 convert_move (target
, op0
, unsignedp
);
10950 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10958 tree fndecl
= get_callee_fndecl (exp
), attr
;
10961 /* Don't diagnose the error attribute in thunks, those are
10962 artificially created. */
10963 && !CALL_FROM_THUNK_P (exp
)
10964 && (attr
= lookup_attribute ("error",
10965 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10967 const char *ident
= lang_hooks
.decl_printable_name (fndecl
, 1);
10968 error ("%Kcall to %qs declared with attribute error: %s", exp
,
10969 identifier_to_locale (ident
),
10970 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10973 /* Don't diagnose the warning attribute in thunks, those are
10974 artificially created. */
10975 && !CALL_FROM_THUNK_P (exp
)
10976 && (attr
= lookup_attribute ("warning",
10977 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10979 const char *ident
= lang_hooks
.decl_printable_name (fndecl
, 1);
10980 warning_at (tree_nonartificial_location (exp
),
10981 OPT_Wattribute_warning
,
10982 "%Kcall to %qs declared with attribute warning: %s",
10983 exp
, identifier_to_locale (ident
),
10984 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10987 /* Check for a built-in function. */
10988 if (fndecl
&& fndecl_built_in_p (fndecl
))
10990 gcc_assert (DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_FRONTEND
);
10991 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
10994 return expand_call (exp
, target
, ignore
);
10996 case VIEW_CONVERT_EXPR
:
10999 /* If we are converting to BLKmode, try to avoid an intermediate
11000 temporary by fetching an inner memory reference. */
11001 if (mode
== BLKmode
11002 && poly_int_tree_p (TYPE_SIZE (type
))
11003 && TYPE_MODE (TREE_TYPE (treeop0
)) != BLKmode
11004 && handled_component_p (treeop0
))
11006 machine_mode mode1
;
11007 poly_int64 bitsize
, bitpos
, bytepos
;
11009 int unsignedp
, reversep
, volatilep
= 0;
11011 = get_inner_reference (treeop0
, &bitsize
, &bitpos
, &offset
, &mode1
,
11012 &unsignedp
, &reversep
, &volatilep
);
11015 /* ??? We should work harder and deal with non-zero offsets. */
11017 && multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
11019 && known_size_p (bitsize
)
11020 && known_eq (wi::to_poly_offset (TYPE_SIZE (type
)), bitsize
))
11022 /* See the normal_inner_ref case for the rationale. */
11024 = expand_expr_real (tem
,
11025 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
11026 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
11028 && modifier
!= EXPAND_STACK_PARM
11029 ? target
: NULL_RTX
),
11031 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
11034 if (MEM_P (orig_op0
))
11038 /* Get a reference to just this component. */
11039 if (modifier
== EXPAND_CONST_ADDRESS
11040 || modifier
== EXPAND_SUM
11041 || modifier
== EXPAND_INITIALIZER
)
11042 op0
= adjust_address_nv (op0
, mode
, bytepos
);
11044 op0
= adjust_address (op0
, mode
, bytepos
);
11046 if (op0
== orig_op0
)
11047 op0
= copy_rtx (op0
);
11049 set_mem_attributes (op0
, treeop0
, 0);
11050 if (REG_P (XEXP (op0
, 0)))
11051 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
11053 MEM_VOLATILE_P (op0
) |= volatilep
;
11059 op0
= expand_expr_real (treeop0
, NULL_RTX
, VOIDmode
, modifier
,
11060 NULL
, inner_reference_p
);
11062 /* If the input and output modes are both the same, we are done. */
11063 if (mode
== GET_MODE (op0
))
11065 /* If neither mode is BLKmode, and both modes are the same size
11066 then we can use gen_lowpart. */
11067 else if (mode
!= BLKmode
11068 && GET_MODE (op0
) != BLKmode
11069 && known_eq (GET_MODE_PRECISION (mode
),
11070 GET_MODE_PRECISION (GET_MODE (op0
)))
11071 && !COMPLEX_MODE_P (GET_MODE (op0
)))
11073 if (GET_CODE (op0
) == SUBREG
)
11074 op0
= force_reg (GET_MODE (op0
), op0
);
11075 temp
= gen_lowpart_common (mode
, op0
);
11080 if (!REG_P (op0
) && !MEM_P (op0
))
11081 op0
= force_reg (GET_MODE (op0
), op0
);
11082 op0
= gen_lowpart (mode
, op0
);
11085 /* If both types are integral, convert from one mode to the other. */
11086 else if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0
)))
11087 op0
= convert_modes (mode
, GET_MODE (op0
), op0
,
11088 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
11089 /* If the output type is a bit-field type, do an extraction. */
11090 else if (reduce_bit_field
)
11091 return extract_bit_field (op0
, TYPE_PRECISION (type
), 0,
11092 TYPE_UNSIGNED (type
), NULL_RTX
,
11093 mode
, mode
, false, NULL
);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
11102 tree inner_type
= TREE_TYPE (treeop0
);
11104 gcc_assert (!TREE_ADDRESSABLE (exp
));
11106 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
11108 = assign_stack_temp_for_type
11109 (TYPE_MODE (inner_type
),
11110 GET_MODE_SIZE (TYPE_MODE (inner_type
)), inner_type
);
11112 emit_move_insn (target
, op0
);
11116 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11117 output type is such that the operand is known to be aligned, indicate
11118 that it is. Otherwise, we need only be concerned about alignment for
11119 non-BLKmode results. */
11122 enum insn_code icode
;
11124 if (modifier
!= EXPAND_WRITE
11125 && modifier
!= EXPAND_MEMORY
11126 && !inner_reference_p
11128 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode
))
11130 /* If the target does have special handling for unaligned
11131 loads of mode then use them. */
11132 if ((icode
= optab_handler (movmisalign_optab
, mode
))
11133 != CODE_FOR_nothing
)
11137 op0
= adjust_address (op0
, mode
, 0);
11138 /* We've already validated the memory, and we're creating a
11139 new pseudo destination. The predicates really can't
11141 reg
= gen_reg_rtx (mode
);
11143 /* Nor can the insn generator. */
11144 rtx_insn
*insn
= GEN_FCN (icode
) (reg
, op0
);
11148 else if (STRICT_ALIGNMENT
)
11150 poly_uint64 mode_size
= GET_MODE_SIZE (mode
);
11151 poly_uint64 temp_size
= mode_size
;
11152 if (GET_MODE (op0
) != BLKmode
)
11153 temp_size
= upper_bound (temp_size
,
11154 GET_MODE_SIZE (GET_MODE (op0
)));
11156 = assign_stack_temp_for_type (mode
, temp_size
, type
);
11157 rtx new_with_op0_mode
11158 = adjust_address (new_rtx
, GET_MODE (op0
), 0);
11160 gcc_assert (!TREE_ADDRESSABLE (exp
));
11162 if (GET_MODE (op0
) == BLKmode
)
11164 rtx size_rtx
= gen_int_mode (mode_size
, Pmode
);
11165 emit_block_move (new_with_op0_mode
, op0
, size_rtx
,
11166 (modifier
== EXPAND_STACK_PARM
11167 ? BLOCK_OP_CALL_PARM
11168 : BLOCK_OP_NORMAL
));
11171 emit_move_insn (new_with_op0_mode
, op0
);
11177 op0
= adjust_address (op0
, mode
, 0);
11184 tree lhs
= treeop0
;
11185 tree rhs
= treeop1
;
11186 gcc_assert (ignore
);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
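	/* For instance, with one-bit bitfields s.a and s.b:
	     s.a |= s.b;  is emitted as  if (s.b) s.a = 1;
	     s.a &= s.b;  is emitted as  if (!s.b) s.a = 0;  */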
11196 if (TREE_CODE (lhs
) == COMPONENT_REF
11197 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
11198 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
11199 && TREE_OPERAND (rhs
, 0) == lhs
11200 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
11201 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
11202 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
11204 rtx_code_label
*label
= gen_label_rtx ();
11205 int value
= TREE_CODE (rhs
) == BIT_IOR_EXPR
;
11206 profile_probability prob
= profile_probability::uninitialized ();
11208 jumpifnot (TREE_OPERAND (rhs
, 1), label
, prob
);
11210 jumpif (TREE_OPERAND (rhs
, 1), label
, prob
);
11211 expand_assignment (lhs
, build_int_cst (TREE_TYPE (rhs
), value
),
11213 do_pending_stack_adjust ();
11214 emit_label (label
);
11218 expand_assignment (lhs
, rhs
, false);
11223 return expand_expr_addr_expr (exp
, target
, tmode
, modifier
);
11225 case REALPART_EXPR
:
11226 op0
= expand_normal (treeop0
);
11227 return read_complex_part (op0
, false);
11229 case IMAGPART_EXPR
:
11230 op0
= expand_normal (treeop0
);
11231 return read_complex_part (op0
, true);
11238 /* Expanded in cfgexpand.c. */
11239 gcc_unreachable ();
11241 case TRY_CATCH_EXPR
:
11243 case EH_FILTER_EXPR
:
11244 case TRY_FINALLY_EXPR
:
11245 /* Lowered by tree-eh.c. */
11246 gcc_unreachable ();
11248 case WITH_CLEANUP_EXPR
:
11249 case CLEANUP_POINT_EXPR
:
11251 case CASE_LABEL_EXPR
:
11256 case COMPOUND_EXPR
:
11257 case PREINCREMENT_EXPR
:
11258 case PREDECREMENT_EXPR
:
11259 case POSTINCREMENT_EXPR
:
11260 case POSTDECREMENT_EXPR
:
11263 case COMPOUND_LITERAL_EXPR
:
11264 /* Lowered by gimplify.c. */
11265 gcc_unreachable ();
11268 /* Function descriptors are not valid except for as
11269 initialization constants, and should not be expanded. */
11270 gcc_unreachable ();
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  poly_int64 const_exp;
  if (poly_int_rtx_p (exp, &const_exp))
    {
      tree t = build_int_cst_type (type, const_exp);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
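/* For illustration: reducing an SImode value to a 6-bit unsigned
   bit-field type masks it with 0x3f, while reducing it to a 6-bit
   signed bit-field type shifts left by 26 and arithmetically back
   right by 26, so that bit 5 is replicated into the upper bits.  */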
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
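/* For example, an offset of the shape
     (-(sizetype) &EXP) & 511
   satisfies this predicate on targets where 512 bytes exceeds
   BIGGEST_ALIGNMENT: it is a BIT_AND_EXPR of the negated address of
   EXP with a constant that is one less than a power of 2.  */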
11352 /* Return the tree node if an ARG corresponds to a string constant or zero
11353 if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
11354 non-constant) offset in bytes within the string that ARG is accessing.
11355 If MEM_SIZE is non-zero the storage size of the memory is returned.
11356 If DECL is non-zero the constant declaration is returned if available. */
11359 string_constant (tree arg
, tree
*ptr_offset
, tree
*mem_size
, tree
*decl
)
11364 /* Non-constant index into the character array in an ARRAY_REF
11365 expression or null. */
11366 tree varidx
= NULL_TREE
;
11368 poly_int64 base_off
= 0;
11370 if (TREE_CODE (arg
) == ADDR_EXPR
)
11372 arg
= TREE_OPERAND (arg
, 0);
11374 if (TREE_CODE (arg
) == ARRAY_REF
)
11376 tree idx
= TREE_OPERAND (arg
, 1);
11377 if (TREE_CODE (idx
) != INTEGER_CST
)
11379 /* From a pointer (but not array) argument extract the variable
11380 index to prevent get_addr_base_and_unit_offset() from failing
11381 due to it. Use it later to compute the non-constant offset
11382 into the string and return it to the caller. */
11384 ref
= TREE_OPERAND (arg
, 0);
11386 if (TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
)
11389 if (!integer_zerop (array_ref_low_bound (arg
)))
11392 if (!integer_onep (array_ref_element_size (arg
)))
11396 array
= get_addr_base_and_unit_offset (ref
, &base_off
);
11398 || (TREE_CODE (array
) != VAR_DECL
11399 && TREE_CODE (array
) != CONST_DECL
11400 && TREE_CODE (array
) != STRING_CST
))
11403 else if (TREE_CODE (arg
) == PLUS_EXPR
|| TREE_CODE (arg
) == POINTER_PLUS_EXPR
)
11405 tree arg0
= TREE_OPERAND (arg
, 0);
11406 tree arg1
= TREE_OPERAND (arg
, 1);
11409 tree str
= string_constant (arg0
, &offset
, mem_size
, decl
);
11412 str
= string_constant (arg1
, &offset
, mem_size
, decl
);
11418 /* Avoid pointers to arrays (see bug 86622). */
11419 if (POINTER_TYPE_P (TREE_TYPE (arg
))
11420 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == ARRAY_TYPE
11421 && !(decl
&& !*decl
)
11422 && !(decl
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl
))
11423 && mem_size
&& tree_fits_uhwi_p (*mem_size
)
11424 && tree_int_cst_equal (*mem_size
, DECL_SIZE_UNIT (*decl
))))
11427 tree type
= TREE_TYPE (offset
);
11428 arg1
= fold_convert (type
, arg1
);
11429 *ptr_offset
= fold_build2 (PLUS_EXPR
, type
, offset
, arg1
);
11434 else if (TREE_CODE (arg
) == SSA_NAME
)
11436 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
11437 if (!is_gimple_assign (stmt
))
11440 tree rhs1
= gimple_assign_rhs1 (stmt
);
11441 tree_code code
= gimple_assign_rhs_code (stmt
);
11442 if (code
== ADDR_EXPR
)
11443 return string_constant (rhs1
, ptr_offset
, mem_size
, decl
);
11444 else if (code
!= POINTER_PLUS_EXPR
)
11448 if (tree str
= string_constant (rhs1
, &offset
, mem_size
, decl
))
11450 /* Avoid pointers to arrays (see bug 86622). */
11451 if (POINTER_TYPE_P (TREE_TYPE (rhs1
))
11452 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1
))) == ARRAY_TYPE
11453 && !(decl
&& !*decl
)
11454 && !(decl
&& tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl
))
11455 && mem_size
&& tree_fits_uhwi_p (*mem_size
)
11456 && tree_int_cst_equal (*mem_size
, DECL_SIZE_UNIT (*decl
))))
11459 tree rhs2
= gimple_assign_rhs2 (stmt
);
11460 tree type
= TREE_TYPE (offset
);
11461 rhs2
= fold_convert (type
, rhs2
);
11462 *ptr_offset
= fold_build2 (PLUS_EXPR
, type
, offset
, rhs2
);
11467 else if (DECL_P (arg
))
11472 tree offset
= wide_int_to_tree (sizetype
, base_off
);
11475 if (TREE_CODE (TREE_TYPE (array
)) != ARRAY_TYPE
)
11478 gcc_assert (TREE_CODE (arg
) == ARRAY_REF
);
11479 tree chartype
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg
, 0)));
11480 if (TREE_CODE (chartype
) != INTEGER_TYPE
)
11483 offset
= fold_convert (sizetype
, varidx
);
11486 if (TREE_CODE (array
) == STRING_CST
)
11488 *ptr_offset
= fold_convert (sizetype
, offset
);
11490 *mem_size
= TYPE_SIZE_UNIT (TREE_TYPE (array
));
11493 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array
)))
11494 >= TREE_STRING_LENGTH (array
));
11498 if (!VAR_P (array
) && TREE_CODE (array
) != CONST_DECL
)
11501 tree init
= ctor_for_folding (array
);
11503 /* Handle variables initialized with string literals. */
11504 if (!init
|| init
== error_mark_node
)
11506 if (TREE_CODE (init
) == CONSTRUCTOR
)
      /* Convert the 64-bit constant offset to a wider type to avoid
	 overflow.  */
      offset_int wioff;
      if (!base_off.is_constant (&wioff))
	return NULL_TREE;
11514 wioff
*= BITS_PER_UNIT
;
11515 if (!wi::fits_uhwi_p (wioff
))
11518 base_off
= wioff
.to_uhwi ();
11519 unsigned HOST_WIDE_INT fieldoff
= 0;
11520 init
= fold_ctor_reference (NULL_TREE
, init
, base_off
, 0, array
,
11522 HOST_WIDE_INT cstoff
;
11523 if (!base_off
.is_constant (&cstoff
))
11526 cstoff
= (cstoff
- fieldoff
) / BITS_PER_UNIT
;
11527 tree off
= build_int_cst (sizetype
, cstoff
);
11529 offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (offset
), offset
, off
);
11537 *ptr_offset
= offset
;
11539 tree eltype
= TREE_TYPE (init
);
11540 tree initsize
= TYPE_SIZE_UNIT (eltype
);
11542 *mem_size
= initsize
;
11547 if (TREE_CODE (init
) == INTEGER_CST
11548 && (TREE_CODE (TREE_TYPE (array
)) == INTEGER_TYPE
11549 || TYPE_MAIN_VARIANT (eltype
) == char_type_node
))
11551 /* For a reference to (address of) a single constant character,
11552 store the native representation of the character in CHARBUF.
11553 If the reference is to an element of an array or a member
11554 of a struct, only consider narrow characters until ctors
11555 for wide character arrays are transformed to STRING_CSTs
11556 like those for narrow arrays. */
11557 unsigned char charbuf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
11558 int len
= native_encode_expr (init
, charbuf
, sizeof charbuf
, 0);
11561 /* Construct a string literal with elements of ELTYPE and
11562 the representation above. Then strip
11563 the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST. */
11564 init
= build_string_literal (len
, (char *)charbuf
, eltype
);
11565 init
= TREE_OPERAND (TREE_OPERAND (init
, 0), 0);
11569 if (TREE_CODE (init
) != STRING_CST
)
11572 gcc_checking_assert (tree_to_shwi (initsize
) >= TREE_STRING_LENGTH (init
));
/* Compute the modular multiplicative inverse of A modulo M
   using extended Euclid's algorithm.  Assumes A and M are coprime.  */

static wide_int
mod_inv (const wide_int &a, const wide_int &b)
{
  /* Verify the assumption.  */
  gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));

  unsigned int p = a.get_precision () + 1;
  gcc_checking_assert (b.get_precision () + 1 == p);
  wide_int c = wide_int::from (a, p, UNSIGNED);
  wide_int d = wide_int::from (b, p, UNSIGNED);
  wide_int x0 = wide_int::from (0, p, UNSIGNED);
  wide_int x1 = wide_int::from (1, p, UNSIGNED);

  if (wi::eq_p (b, 1))
    return wide_int::from (1, p, UNSIGNED);

  while (wi::gt_p (c, 1, UNSIGNED))
    {
      wide_int t = d;
      wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
      c = t;
      wide_int s = x0;
      x0 = wi::sub (x1, wi::mul (q, x0));
      x1 = s;
    }

  if (wi::lt_p (x1, 0, SIGNED))
    x1 += d;

  return x1;
}
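/* For instance, with a == 3 and b == 1 << 32 the loop above computes
   0xaaaaaaab, since 3 * 0xaaaaaaab == 0x200000001 == 1 (mod 1<<32).
   This is the multiplier used below when rewriting x % 3 == 0.  */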
/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
   is non-zero.  With C3 = ((1<<(prec-1)) | (C1 - 1)), transform the
   comparison:
   for C2 > 0 to x & C3 == C2,
   for C2 < 0 to x & C3 == (C2 & C3).  */

static enum tree_code
maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
11616 gimple
*stmt
= get_def_for_expr (*arg0
, TRUNC_MOD_EXPR
);
11617 tree treeop0
= gimple_assign_rhs1 (stmt
);
11618 tree treeop1
= gimple_assign_rhs2 (stmt
);
11619 tree type
= TREE_TYPE (*arg0
);
11620 scalar_int_mode mode
;
11621 if (!is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
))
11623 if (GET_MODE_BITSIZE (mode
) != TYPE_PRECISION (type
)
11624 || TYPE_PRECISION (type
) <= 1
11625 || TYPE_UNSIGNED (type
)
      /* Signed x % c == 0 should have been optimized into unsigned modulo
	 earlier.  */
      || integer_zerop (*arg1)
      /* If x is known to be non-negative, the modulo will be expanded
	 as unsigned modulo.  */
      || get_range_pos_neg (treeop0) == 1)
    return code;
  /* x % c == d where d < 0 && d <= -c should be always false.  */
  if (tree_int_cst_sgn (*arg1) == -1
      && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
    return code;
  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1) - 1;
  w |= wi::shifted_mask (0, prec - 1, true, prec);
  tree c3 = wide_int_to_tree (type, w);
  tree c4 = *arg1;
  if (tree_int_cst_sgn (*arg1) == -1)
    c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));
  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();
  start_sequence ();
  location_t loc = gimple_location (stmt);
11655 struct separate_ops ops
;
11656 ops
.code
= TRUNC_MOD_EXPR
;
11657 ops
.location
= loc
;
11658 ops
.type
= TREE_TYPE (treeop0
);
11661 ops
.op2
= NULL_TREE
;
11663 rtx mor
= expand_expr_real_2 (&ops
, NULL_RTX
, TYPE_MODE (ops
.type
),
11665 rtx_insn
*moinsns
= get_insns ();
11668 unsigned mocost
= seq_cost (moinsns
, speed_p
);
11669 mocost
+= rtx_cost (mor
, mode
, EQ
, 0, speed_p
);
11670 mocost
+= rtx_cost (expand_normal (*arg1
), mode
, EQ
, 1, speed_p
);
11672 ops
.code
= BIT_AND_EXPR
;
11673 ops
.location
= loc
;
11674 ops
.type
= TREE_TYPE (treeop0
);
11677 ops
.op2
= NULL_TREE
;
11679 rtx mur
= expand_expr_real_2 (&ops
, NULL_RTX
, TYPE_MODE (ops
.type
),
11681 rtx_insn
*muinsns
= get_insns ();
11684 unsigned mucost
= seq_cost (muinsns
, speed_p
);
11685 mucost
+= rtx_cost (mur
, mode
, EQ
, 0, speed_p
);
11686 mucost
+= rtx_cost (expand_normal (c4
), mode
, EQ
, 1, speed_p
);
  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (TREE_TYPE (*arg0), mur);
  *arg1 = c4;
  return code;
}
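/* Worked example of the transformation above, for 32-bit signed x:
     x % 16 == 3   becomes   (x & 0x8000000f) == 3
     x % 16 == -3  becomes   (x & 0x8000000f) == 0x8000000d
   since C3 == (1 << 31) | 15 keeps the sign bit together with the low
   bits that determine the remainder, and -3 & C3 == 0x8000000d.  */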
/* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
   If C1 is odd to:
    (X - C2) * C3 <= C4 (or >), where
   C3 is modular multiplicative inverse of C1 and 1<<prec and
   C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
   if C2 > ((1<<prec) - 1) % C1).
   If C1 is even, S = ctz (C1) and C2 is 0, use
    ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
   inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.

   For signed (X % C1) == 0 if C1 is odd to (all operations in it
   signed):
    (X * C3) + C4 <= 2 * C4, where
   C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
   C4 is ((1<<(prec - 1) - 1) / C1).
   If C1 is even, S = ctz(C1), use
    ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
   where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
   and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).

   See the Hacker's Delight book, section 10-17.  */

static enum tree_code
maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
11725 gcc_checking_assert (code
== EQ_EXPR
|| code
== NE_EXPR
);
11726 gcc_checking_assert (TREE_CODE (*arg1
) == INTEGER_CST
);
11731 gimple
*stmt
= get_def_for_expr (*arg0
, TRUNC_MOD_EXPR
);
11735 tree treeop0
= gimple_assign_rhs1 (stmt
);
11736 tree treeop1
= gimple_assign_rhs2 (stmt
);
11737 if (TREE_CODE (treeop0
) != SSA_NAME
11738 || TREE_CODE (treeop1
) != INTEGER_CST
11739 /* Don't optimize the undefined behavior case x % 0;
11740 x % 1 should have been optimized into zero, punt if
	 it makes it here for whatever reason;
	 x % -c should have been optimized into x % c.  */
      || compare_tree_int (treeop1, 2) <= 0
      /* Likewise x % c == d where d >= c should be always false.  */
      || tree_int_cst_le (treeop1, *arg1))
    return code;

  /* Unsigned x % pow2 is handled right already, for signed
     modulo handle it in maybe_optimize_pow2p_mod_cmp.  */
  if (integer_pow2p (treeop1))
    return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);

  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1)
    return code;

  signop sgn = UNSIGNED;
  /* If both operands are known to have the sign bit clear, handle
     even the signed modulo case as unsigned.  treeop1 is always
     positive >= 2, checked above.  */
  if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
    sgn = SIGNED;

  if (!TYPE_UNSIGNED (type))
    {
      if (tree_int_cst_sgn (*arg1) == -1)
	return code;
      type = unsigned_type_for (type);
      if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
	return code;
    }

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1);
  int shift = wi::ctz (w);
  /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
     C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
     If C1 is odd, we can handle all cases by subtracting
     C4 below.  We could handle even the even C1 and C2 > -1U % C1 cases
     e.g. by testing for overflow on the subtraction, punt on that for now
     though.  */
  if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
    {
      if (sgn == SIGNED)
	return code;
      wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
      if (wi::gtu_p (wi::to_wide (*arg1), x))
	return code;
    }

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Punt if treeop0 is used in the same bb in a division
	 or another modulo with the same divisor.  We should expect
	 the division and modulo combined together.  */
      if (use_stmt == stmt
	  || gimple_bb (use_stmt) != gimple_bb (stmt))
	continue;
      if (!is_gimple_assign (use_stmt)
	  || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
	      && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
	continue;
      if (gimple_assign_rhs1 (use_stmt) != treeop0
	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
	continue;
      return code;
    }

  w = wi::lrshift (w, shift);
  wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
  wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
  wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
  tree c3 = wide_int_to_tree (type, m);
  tree c5 = NULL_TREE;
  wide_int d, e;
  if (sgn == UNSIGNED)
    {
      d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
      /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
	 otherwise use < or subtract one from C4.  E.g. for
	 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
	 x % 3U == 1 already needs to be
	 (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
      if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
	d -= 1;
      if (shift)
	d = wi::lrshift (d, shift);
    }
  else
    {
      e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
      if (!shift)
	d = wi::lshift (e, 1);
      else
	{
	  e = wi::bit_and (e, wi::mask (shift, true, prec));
	  d = wi::lrshift (e, shift - 1);
	}
      c5 = wide_int_to_tree (type, e);
    }
  tree c4 = wide_int_to_tree (type, d);

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
				EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  tree t = fold_convert_loc (loc, type, treeop0);
  if (!integer_zerop (*arg1))
    t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
  t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
  if (sgn == SIGNED)
    t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
  if (shift)
    {
      tree s = build_int_cst (NULL_TREE, shift);
      t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
    }

  start_sequence ();
  rtx mur = expand_normal (t);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, LE, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (type, mur);
  *arg1 = c4;
  return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
}
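
/* A worked instance of the transformation above (illustrative sketch in
   plain C rather than wide_int, assuming prec == 32 and the x % 3U case
   from the comments): 0xaaaaaaab is the multiplicative inverse of 3
   modulo 2^32 and 0x55555555 == 0xffffffffU / 3, so

     x % 3U == 0   becomes   x * 0xaaaaaaabU <= 0x55555555U
     x % 3U == 1   becomes   (x - 1) * 0xaaaaaaabU <= 0x55555554U

   Multiplying by the inverse maps the multiples of 3 onto
   0 .. 0x55555555 and everything else above that bound, so a single
   unsigned comparison replaces the modulo.  */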
/* Generate code to calculate the expression described by OPS, an exploded
   comparison, using a store-flag instruction, and return an rtx for the
   result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((POINTER_TYPE_P (TREE_TYPE (arg0))
	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
	  || (POINTER_TYPE_P (TREE_TYPE (arg1))
	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
      && TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
      if (code != ops->code)
	{
	  struct separate_ops nops = *ops;
	  nops.code = ops->code = code;
	  nops.op0 = arg0;
	  nops.op1 = arg1;
	  nops.type = TREE_TYPE (arg0);
	  return do_store_flag (&nops, target, mode);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
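
/* An illustrative sketch of the single-bit test handled above, assuming
   a 32-bit int: (x & 0x10) != 0 is expanded as (x >> 4) & 1, and
   (x & 0x10) == 0 as the same value xor'ed with 1, i.e.
   ((x >> 4) & 1) ^ 1, so no scc instruction is needed.  */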
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  struct expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
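
/* An illustrative sketch, with hypothetical case values: for a switch
   over cases 10 .. 14, the five operands created above would be the
   index rtx, minval 10, range 4, the dispatch table label and the
   default (or fallback) label; the target's casesi pattern performs
   the bounds check and the table dispatch itself.  */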
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    {
      unsigned int width;

      /* We know the value of INDEX is between 0 and RANGE.  If we have a
	 sign-extended subreg, and RANGE does not have the sign bit set, then
	 we have a value that is valid for both sign and zero extension.  In
	 this case, we get better code if we sign extend.  */
      if (GET_CODE (index) == SUBREG
	  && SUBREG_PROMOTED_VAR_P (index)
	  && SUBREG_PROMOTED_SIGNED_P (index)
	  && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
	index = convert_to_mode (Pmode, index, 0);
      else
	index = convert_to_mode (Pmode, index, 1);
    }

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
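
/* An illustrative sketch of the address arithmetic above: the code
   computes, in effect,

     pc = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE))

   so with 4-byte table entries and index 3 (hypothetical values) the
   entry at table_label + 12 supplies the jump target.  */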
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	builder.quick_push (CONSTM1_RTX (inner));
      else
	gcc_unreachable ();
    }

  return builder.build ();
}
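
/* For illustration: a 4-lane boolean VECTOR_CST such as {0, 1, 0, 1} is
   pushed lane by lane as the CONST_VECTOR {0, -1, 0, -1}, i.e. each
   true lane becomes the all-ones constant of the inner mode.  */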
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;

  /* The result has a fixed number of bits so the input must too.  */
  unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
  for (unsigned int i = 0; i < nunits; ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
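
/* For illustration (hypothetical input): a 4-lane VECTOR_CST
   {-1, 0, 0, -1} sets bits 0 and 3 of the result, so the returned
   scalar mask is 0x9 in MODE.  */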
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
							  inner));
      else if (TREE_CODE (elt) == FIXED_CST)
	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							  inner));
      else
	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
						  inner));
    }

  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
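
/* For example, lang "gxx" with DWARF2 unwind info yields a declaration
   of __gxx_personality_v0, the C++ EH personality routine; with SJLJ
   exceptions the name would be __gxx_personality_sj0.  */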
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}