/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
#include "rtx-vector-builder.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT, unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
			machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
			  profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a <rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (!targetm.hard_regno_mode_ok (regno, mode))
	      continue;

	    set_mode_and_regno (reg, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
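/* Illustrative note (not part of the original file): after the probing
   above, later code can ask cheaply whether a plain load or store works
   in a given mode, e.g.

     if (direct_load[(int) SImode])
       ...   -- a (set (reg:SI ...) (mem:SI ...)) is known to recog ()

   assuming SImode is a mode the target supports.  */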
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
	  >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}
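/* Usage sketch (illustrative, not part of the original file): widening an
   unsigned QImode value SRC into a fresh SImode pseudo looks like

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);   -- unsignedp == 1 requests zero-extension

   assuming SRC is some QImode rtx.  */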
/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
	!= CODE_FOR_nothing)
      {
	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
			to, from, UNKNOWN);
	return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an isolated
	 register when maybe so.  Likewise for any MEM input, since the
	 conversion sequence might require several references to it and we
	 must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
					    LT, lowfrom, const0_rtx,
					    lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  scalar_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  opt_scalar_mode intermediate_iter;
	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
	    {
	      scalar_mode intermediate = intermediate_iter.require ();
	      if (((can_extend_p (to_mode, intermediate, unsignedp)
		    != CODE_FOR_nothing)
		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
							 intermediate)))
		  && (can_extend_p (intermediate, from_mode, unsignedp)
		      != CODE_FOR_nothing))
		{
		  convert_move (to, convert_to_mode (intermediate, from,
						     unsignedp), unsignedp);
		  return;
		}
	    }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
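/* Worked example (illustrative, not from the original file): extending a
   24-bit FROM into a 32-bit TO when no extend insn and no intermediate
   mode exist uses the two shifts above:

     shift_amount = 32 - 24 = 8
     tmp = from << 8    -- LSHIFT_EXPR: move the value to the top bits
     tmp = tmp >> 8     -- RSHIFT_EXPR: arithmetic or logical depending
                           on UNSIGNEDP, replicating sign or zero bits  */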
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
	  >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
	 much to do with respect to canonicalization.  We have to
	 assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
	oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
				   GET_MODE_PRECISION (int_mode),
				   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
	  || CONST_POLY_INT_P (x)
	  || (REG_P (x)
	      && (!HARD_REGISTER_P (x)
		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
	      && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
    return gen_lowpart (int_mode, x);

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
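/* Example (illustrative): truncating the SImode constant 0x1234 to QImode
   goes through the wide_int path above and folds at expand time:

     rtx c = convert_modes (QImode, SImode, GEN_INT (0x1234), 1);
     -- yields (const_int 0x34)

   assuming the usual 8-bit QImode and 32-bit SImode precisions.  */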
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	{
	  tmode = mode_iter.require ();
	  if (GET_MODE_SIZE (tmode) > max_pieces
	      || targetm.slow_unaligned_access (tmode, align))
	    break;
	  xmode = tmode;
	}

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
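/* Example (illustrative): on a target whose integer modes are QI/HI/SI/DI,
   widest_int_mode_for_size (8) returns SImode, since only modes strictly
   narrower than SIZE bytes qualify.  */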
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
		  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
						 optimize_insn_for_speed_p ());
}
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	{
	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
	  switch (op)
	    {
	    case CLEAR_BY_PIECES:
	    case MOVE_BY_PIECES:
	    case SET_BY_PIECES:
	    case STORE_BY_PIECES:
	      n_insns += n_pieces;
	      break;

	    case COMPARE_BY_PIECES:
	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
	      int batch_ops = 4 * batch - 1;
	      unsigned HOST_WIDE_INT full = n_pieces / batch;
	      n_insns += full * batch_ops;
	      if (n_pieces % batch != 0)
		n_insns += batch_ops;
	      break;
	    }

	  l %= modesize;
	}
      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
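/* Example (illustrative): for COMPARE_BY_PIECES with a branch ratio of 4,
   batch_ops = 4*4 - 1 = 15 insns per batch; comparing 9 pieces costs two
   full batches plus one partial batch, n_insns = 2*15 + 15 = 45.  */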
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;

  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;

  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;

  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;

  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;

  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;

public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
			  void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
	m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
	 implementing the memory operation has never handled them.
	 Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
	{
	  m_auto = true;
	  if (STACK_GROWS_DOWNWARD)
	    m_addr_inc = -1;
	  else
	    m_addr_inc = 1;
	}
      else
	gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
			     HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
		     ? USE_LOAD_PRE_DECREMENT (mode)
		     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
		      ? USE_LOAD_POST_INCREMENT (mode)
		      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
				 plus_constant (addr_mode,
						m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}
/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   applied).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overridden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
		  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
				rtx from, bool from_load,
				by_pieces_constfn from_cfn,
				void *from_cfn_data,
				unsigned HOST_WIDE_INT len,
				unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
	       from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
	{
	  unsigned int size = GET_MODE_SIZE (mode);
	  rtx to1 = NULL_RTX, from1;

	  while (m_len >= size)
	    {
	      if (m_reverse)
		m_offset -= size;

	      to1 = m_to.adjust (mode, m_offset);
	      from1 = m_from.adjust (mode, m_offset);

	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
	      m_from.maybe_predec (-(HOST_WIDE_INT)size);

	      generate (to1, from1, mode);

	      m_to.maybe_postinc (size);
	      m_from.maybe_postinc (size);

	      if (!m_reverse)
		m_offset += size;

	      m_len -= size;
	    }

	  finish_mode (mode);
	}

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
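/* Worked example (illustrative, assuming MOVE_MAX_PIECES == 8): with
   m_len == 11, the loop above emits one 8-byte access, then, as
   m_max_size shrinks past the 4-byte mode, one 2-byte access and one
   1-byte access: 8 + 2 + 1 = 11 bytes.  */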
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_endp (int);
};
/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a move_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   moved in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise M_GEN_FUN is the insn gen function used to
   generate the move.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
move_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
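/* Usage sketch (illustrative): callers normally guard this with
   can_move_by_pieces, exactly as emit_block_move_hints does below:

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);  -- ENDP 0: return TO  */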
/* Derived class from op_by_pieces_d, providing support for block store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
		     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_endp (int);
};
/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a store_by_pieces_operation.
   OP0 is the destination and OP1 the value to be stored in MODE;
   M_GEN_FUN is the insn gen function used to emit the store.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
store_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  scalar_int_mode mode = widest_int_mode_for_size (max_size);

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);

  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
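/* Usage sketch (illustrative assumption): a caller expanding strcpy of a
   short literal might do

     store_by_pieces (dest_mem, 3, builtin_strncpy_read_str, (void *) "ab",
		      dest_align, false, 0);

   where builtin_strncpy_read_str is assumed to be a by_pieces-style
   constfn supplying the string bytes for each offset and mode.  */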
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
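/* Example (illustrative): clearing 16 bytes of a BLKmode MEM with known
   64-bit alignment:

     clear_by_pieces (mem, 16, 64);   -- ALIGN is in bits

   which emits const0_rtx stores in the widest modes the target allows.  */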
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);

 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
		       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
			       true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
			     true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
	return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
			   m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}
/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
			     NULL_RTX, NULL, m_fail_label,
			     profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
		   rtx target, unsigned int align,
		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
			    fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);

  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
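/* Usage sketch (illustrative): emit_block_cmp_hints below calls this as

     result = compare_by_pieces (x, y, INTVAL (len), target, align,
				 y_cfn, y_cfn_data);

   leaving 0 in TARGET when all pieces match and 1 on a miscompare.  */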
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move
   MAX_SIZE is the maximal size of block to move, if it can not be represented
   in unsigned HOST_WIDE_INT, then it is mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size,
		       unsigned HOST_WIDE_INT min_size,
		       unsigned HOST_WIDE_INT max_size,
		       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size,
				       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
	 tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
	mark_addressable (y_expr);
      if (x_expr)
	mark_addressable (x_expr);
      retval = emit_block_copy_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
				min, max, max);
}
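/* Example (illustrative): for a non-constant SIZE in SImode the hints
   degenerate to MIN_SIZE = 0 and MAX_SIZE = GET_MODE_MASK (SImode)
   = 0xffffffff, i.e. "any representable length".  */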
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size,
			    unsigned HOST_WIDE_INT min_size,
			    unsigned HOST_WIDE_INT max_size,
			    unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
	expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label,
			   profile_probability::guessed_always ()
			     .apply_scale (9, 10));
}
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
			   rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
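/* Note (illustrative assumption): the emit_block_copy_via_libcall used in
   emit_block_move_hints above is a thin wrapper assumed to forward here,
   roughly

     return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size,
				       tailcall);  */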
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
			   unsigned align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   returning NULL_RTX.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
		      bool equality_only, by_pieces_constfn y_cfn,
		      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
				y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
						     GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
						      GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
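
/* Sketch of the data structure this works on (illustrative only): a
   PARALLEL group describing a value split between two registers, say
   the low 8 bytes in (reg:DI 0) and the next 4 bytes in (reg:SI 1),
   has the shape

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:SI 1) (const_int 8))])

   gen_group_rtx returns the same shape with fresh pseudos in place of
   the original registers; the register numbers above are invented for
   the example.  */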
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
		   poly_int64 ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
	{
	  src = gen_reg_rtx (imode);
	  emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
	}
      else
	{
	  src = assign_stack_temp (GET_MODE (orig_src), ssize);
	  emit_move_insn (src, orig_src);
	}
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      poly_int64 bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_int64 shift = 0;

      /* Handle trailing fragments that run over the size of the struct.
	 It's the target's responsibility to make sure that the fragment
	 cannot be strictly smaller in some cases and strictly larger
	 in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (maybe_gt (bytelen, 0));
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
	  && known_eq (bytelen, GET_MODE_SIZE (mode)))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
	  poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
	  unsigned int elt;
	  poly_int64 subpos;

	  if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
	      && known_le (subpos + bytelen, slen0))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, elt);
	      if (maybe_ne (subpos, 0)
		  || maybe_ne (subpos + bytelen, slen0)
		  || (!CONSTANT_P (tmps[i])
		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     subpos * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, false,
					     NULL);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (known_eq (bytepos, 0));
	      mem = assign_stack_temp (GET_MODE (src), slen);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode, false,
					   NULL);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  if (known_eq (bytelen, ssize))
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      /* TODO: const_wide_int can have sizes other than this...  */
	      gcc_assert (known_eq (2 * bytelen, ssize));
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, false, NULL);

      if (maybe_ne (shift, 0))
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
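
/* Illustrative sketch (not in the original source): given the PARALLEL
   layout shown above for gen_group_rtx, loading a BLKmode MEM SRC
   holding a 12-byte struct of type TYPE into the group DST would be

     emit_group_load (dst, src, type, 12);

   where 12 is the total size in bytes, or -1 if it is not known.  */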
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
		  poly_int64 ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
	{
	  dst = gen_reg_rtx (imode);
	  emit_group_store (dst, src, type, ssize);
	  dst = gen_lowpart (GET_MODE (orig_dst), dst);
	}
      else
	{
	  dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
	  emit_group_store (dst, src, type, ssize);
	}
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      poly_int64 bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, start), 1)), bytepos))
	    {
	      temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)),
			bytepos))
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      poly_int64 bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_uint64 adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.
	 It's the target's responsibility to make sure that the fragment
	 cannot be strictly smaller in some cases and strictly larger
	 in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	adj_bytelen = ssize - bytepos;
      else
	adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (known_le (bytepos + adj_bytelen,
			GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
	    dest = XEXP (dst, 0);
	  else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      machine_mode dest_mode = GET_MODE (dest);
	      machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      /* Handle trailing fragments that run over the size of the struct.  */
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }

	  /* Make sure not to write past the end of the struct.  */
	  store_bit_field (dest,
			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
			   VOIDmode, tmps[i], false);
	}

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
	       && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	       && multiple_p (bytepos * BITS_PER_UNIT,
			      GET_MODE_ALIGNMENT (mode))
	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
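
/* Illustrative sketch (hypothetical caller, not in the original): code
   that has just expanded a call and may have received a PARALLEL return
   value VAL of type TYPE can normalize it with

     val = maybe_emit_group_store (val, type);

   after which VAL is an ordinary register (or the original rtx) and is
   safe to use with the scalar move and store routines.  */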
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
  fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
  fixed_size_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
	   && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
	   && GET_MODE (target) == mode
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
	copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
	dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, copy_mode, copy_mode,
					  false, NULL),
		       false);
    }
}
/* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode_in, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
  fixed_size_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = arg_int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					   * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = BITS_PER_WORD;
  if (targetm.slow_unaligned_access (word_mode, TYPE_ALIGN (TREE_TYPE (src))))
    bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
	 on a word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	{
	  /* Generate an appropriate register.  */
	  dst_word = gen_reg_rtx (word_mode);
	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;

	  /* Clear the destination before we move anything into it.  */
	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
	}

      /* We need a new source operand each time bitpos is on a word
	 boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
	 xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
		       0, 0, word_mode,
		       extract_bit_field (src_word, bitsize,
					  bitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  false, NULL),
		       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
	 entire structure.  */
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
	  break;

      /* A suitable mode should have been found.  */
      mode = mode_iter.require ();
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
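
/* Illustrative sketch (assumed caller, not part of the original): the
   expansion of a return statement for a BLKmode value could use

     rtx reg = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_expr);
     if (reg)
       emit_move_insn (result_rtl, reg);

   where result_rtl and retval_expr are placeholder names for the
   function's return rtx and the BLKmode tree being returned; a null
   result means the value occupies no bytes.  */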
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
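
/* Illustrative sketch (invented register numbers, not in the original):
   after loading an argument into hard registers, call expansion records
   the fact in a fusage list so the registers are considered live across
   the call, e.g.

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);    (marks hard regs 4 and 5 as used)

   and later attaches CALL_FUSAGE to the call insn as its
   CALL_INSN_FUNCTION_USAGE.  */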
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size,
		     unsigned HOST_WIDE_INT min_size,
		     unsigned HOST_WIDE_INT max_size,
		     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
						 CLEAR_BY_PIECES,
						 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
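
/* Illustrative sketch (not in the original source): zeroing a 64-byte
   BLKmode MEM OBJECT with no tail-call requirement is simply

     clear_storage (object, GEN_INT (64), BLOCK_OP_NORMAL);

   which forwards to clear_storage_hints with conservative min/max
   size hints derived from the constant length.  */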
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size,
			unsigned HOST_WIDE_INT min_size,
			unsigned HOST_WIDE_INT max_size,
			unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
	expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
	expected_size = min_size;
    }

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
		   false);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode, false, NULL);
}
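
/* Illustrative sketch (hypothetical snippet, not in the original):
   swapping the two halves of a complex value CPLX using the accessors
   above could be written

     rtx re = read_complex_part (cplx, false);
     rtx im = read_complex_part (cplx, true);
     write_complex_part (cplx, copy_to_reg (im), false);
     write_complex_part (cplx, copy_to_reg (re), true);

   where the copy_to_reg calls guard against the reads being clobbered
   by the first store.  */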
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
		       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  scalar_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
	   && HARD_REGISTER_P (x)
	   && REG_NREGS (x) == 1)
      && !(REG_P (y)
	   && HARD_REGISTER_P (y)
	   && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  if (GET_CODE (op) != SUBREG)
    return false;
  machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
  poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
  return (known_ge (offset, GET_MODE_SIZE (innermostmode))
	  || known_le (offset, -UNITS_PER_WORD));
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
	{
	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
	    return ret;
	}
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
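
/* Illustrative sketch (not in the original source): the canonical use
   of this routine is a single scalar copy between two rtxes of the
   same mode, e.g.

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, some_simode_value);

   where some_simode_value is a placeholder for any SImode rtx; BLKmode
   copies must go through emit_block_move instead.  */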
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
	continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  dstmode, speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  dstmode, speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
	 so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
	target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
	return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, poly_int64 extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && known_eq (extra, 0))
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (maybe_ne (extra, 0))
	temp = expand_binop (Pmode, add_optab, temp,
			     gen_int_mode (extra, Pmode),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (maybe_ne (extra, 0) && below)
	temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (maybe_ne (extra, 0) && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (Pmode, size,
							       extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (NARROWEST_INT_MODE, temp);
}
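
/* Illustrative sketch (assumed values, not in the original): pushing a
   variable-sized argument block of SIZE bytes with 8 bytes of padding
   at low addresses would be

     rtx block = push_block (size, 8, 1);

   after which BLOCK addresses the first byte of the block, possibly as
   virtual_outgoing_args_rtx.  */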
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */

poly_int64
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;
      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
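/* For illustration, on a 32-bit stack-grows-down target:

     (set (reg sp) (plus (reg sp) (const_int -16)))        => -16
     (set (mem:SI (pre_dec (reg sp))) (reg r0))            => -4
     (set (mem:SI (pre_modify (reg sp)
                              (plus (reg sp) (const_int -8)))) ...)
                                                           => -8

   whereas an sp adjustment by a register amount, or an sp write in a
   non-scalar-integer mode, yields HOST_WIDE_INT_MIN.  */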
poly_int64
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
                       poly_int64 end_args_size)
{
  poly_int64 args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
        continue;

      /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
         a call argument containing a TLS address that itself requires
         a call to __tls_get_addr.  The handling of stack_pointer_delta
         in emit_single_push_insn is supposed to ensure that any such
         notes are already correct.  */
      rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
      gcc_assert (!note || known_eq (args_size, get_args_size (note)));

      poly_int64 this_delta = find_args_size_adjust (insn);
      if (known_eq (this_delta, 0))
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (known_eq (this_delta, HOST_WIDE_INT_MIN))
        saw_unknown = true;

      if (!note)
        add_args_size_note (insn, args_size);
      if (STACK_GROWS_DOWNWARD)
        this_delta = -poly_uint64 (this_delta);

      if (saw_unknown)
        args_size = HOST_WIDE_INT_MIN;
      else
        args_size -= this_delta;
    }

  return args_size;
}
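/* For illustration: with END_ARGS_SIZE == 12 on a stack-grows-down
   target, scanning three 4-byte pushes backwards annotates them with
   REG_ARGS_SIZE notes of 12, 8 and 4 respectively; each note records
   the outgoing-args size in effect after its insn executes.  */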
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
    {
      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
                                    STACK_GROWS_DOWNWARD ? sub_optab
                                    : add_optab,
                                    stack_pointer_rtx,
                                    gen_int_mode (rounded_size, Pmode),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= rounded_size;

      dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
        /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
        dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
      else
        /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
        dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
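/* For illustration (assuming PUSH_ROUNDING (1) == 2, as on m68k-like
   targets that push at least halfwords): pushing a QImode value with
   PAD_DOWNWARD first drops the stack pointer by the rounded size (2),
   then stores the byte at offset rounded_size - size == 1, so the
   padding byte ends up at the lower address.  */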
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  poly_int64 delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  /* Adjust stack_pointer_delta to describe the situation after the push
     we just performed.  Note that we must do this after the push rather
     than before the push in case calculating X needs pushes and pops of
     its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
     for such pushes and pops must not include the effect of the future
     push of X.  */
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_args_size_note (last, stack_pointer_delta);
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
              || known_eq (delta, old_delta));
}
#endif
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
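/* For illustration: with X == sp+8, Y == sp+12 and SIZE == 8, TMP is
   sp+16 and SUB folds to (const_int 4): reading 8 bytes from sp+8
   runs 4 bytes into Y, so 4 is returned.  With different base
   registers SUB does not fold to a constant and -2 is returned.  */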
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, poly_int64 extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  pad_direction stack_direction
    = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;

  /* Decide where to pad the argument: PAD_DOWNWARD for below,
     PAD_UPWARD for above, or PAD_NONE for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != PAD_NONE)
      where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
        {
          /* A value is to be stored in an insufficiently aligned
             stack slot; copy via a suitably aligned slot if
             necessary.  */
          size = GEN_INT (GET_MODE_SIZE (mode));
          if (!MEM_P (xinner))
            {
              temp = assign_temp (type, 1, 1);
              emit_move_insn (temp, xinner);
              xinner = temp;
            }
        }

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && CONST_INT_P (size)
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((!targetm.slow_unaligned_access (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (maybe_ne (extra, 0)
              && args_addr == 0
              && where_pad != PAD_NONE
              && where_pad != stack_direction)
            anti_adjust_stack (gen_int_mode (extra, Pmode));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (CONST_INT_P (size))
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_int_mode (used, GET_MODE (size)),
                                     NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
              extra = 0;
            }
          else if (CONST_INT_P (args_so_far))
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode, args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode,
                                                  gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          /* If part should go in registers and pushing to that part would
             overwrite some of the values that need to go into regs, load the
             overlapping values into temporary pseudos to be moved into the
             hard regs at the end after the stack pushing has completed.
             We cannot load them directly into the hard regs here because
             they can be clobbered by the block move expansions.
             See PR 65358.  */
          if (partial > 0 && reg != 0 && mode == BLKmode
              && GET_CODE (reg) != PARALLEL)
            {
              overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
              if (overlapping > 0)
                {
                  gcc_assert (overlapping % UNITS_PER_WORD == 0);
                  overlapping /= UNITS_PER_WORD;

                  tmp_regs = XALLOCAVEC (rtx, overlapping);

                  for (int i = 0; i < overlapping; i++)
                    tmp_regs[i] = gen_reg_rtx (word_mode);

                  for (int i = 0; i < overlapping; i++)
                    emit_move_insn (tmp_regs[i],
                                    operand_subword_force (target, i, mode));
                }
              else if (overlapping == -1)
                overlapping = 0;
              /* Could not determine whether there is overlap.
                 Fail the sibcall.  */
              else
                {
                  overlapping = 0;
                  if (sibcall_p)
                    return false;
                }
            }
          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
          && args_addr == 0
          && where_pad != PAD_NONE
          && where_pad != stack_direction)
        anti_adjust_stack (gen_int_mode (extra, Pmode));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
        if (i >= not_stack + offset)
          if (!emit_push_insn (operand_subword_force (x, i, mode),
                               word_mode, NULL_TREE, NULL_RTX, align, 0,
                               NULL_RTX, 0, args_addr,
                               GEN_INT (args_offset + ((i - not_stack + skip)
                                                       * UNITS_PER_WORD)),
                               reg_parm_stack_space, alignment_pad, sibcall_p))
            return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
          && args_addr == 0
          && where_pad != PAD_NONE
          && where_pad != stack_direction)
        anti_adjust_stack (gen_int_mode (extra, Pmode));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
          dest = gen_rtx_MEM (mode, memory_address (mode, addr));

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

          for (int i = 0; i < overlapping; i++)
            emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
                                         + nregs - overlapping + i),
                            tmp_regs[i]);
        }
    }

  if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_int_mode (extra, Pmode));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (poly_uint64 pbitsize,
                                 poly_uint64 pbitpos,
                                 poly_uint64 pbitregion_start,
                                 poly_uint64 pbitregion_end,
                                 machine_mode mode1, rtx str_rtx,
                                 tree to, tree src, bool reverse)
{
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
  if (mode1 != VOIDmode
      || !pbitsize.is_constant (&bitsize)
      || !pbitpos.is_constant (&bitpos)
      || !pbitregion_start.is_constant (&bitregion_start)
      || !pbitregion_end.is_constant (&bitregion_end)
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
         should be a bitfield.  */
      if (!op0stmt
          || !is_gimple_assign (op0stmt)
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
        return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_bitsize = BITS_PER_WORD;

      scalar_int_mode best_mode;
      if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
                          MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
        return false;
      str_mode = best_mode;
      str_bitsize = GET_MODE_BITSIZE (best_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;
  else
    gcc_assert (!reverse);

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      else
        binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
        value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
        {
          rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
                                   str_mode);
          value = expand_and (str_mode, value, mask, NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
        value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
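/* For illustration, this routine lets

     struct { unsigned hi : 8; unsigned lo : 24; } s;
     s.lo |= 0xff;

   be emitted as a single IOR on the word containing the field, rather
   than an extract/modify/insert sequence; a 1-bit field toggled with
   s.f ^= 1 similarly becomes a single XOR.  */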
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
               poly_int64_pod *bitpos, tree *offset)
{
  poly_int64 bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      poly_int64 rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
                           &roffset, &rmode, &unsignedp, &reversep,
                           &volatilep);
      if (!multiple_p (rbitpos, BITS_PER_UNIT))
        {
          *bitstart = *bitend = 0;
          return;
        }
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  poly_uint64 field_offset, repr_offset;
  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (maybe_gt (bitoffset, *bitpos))
    {
      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);

      *bitpos += adjust_bits;
      if (*offset == NULL_TREE)
        *offset = size_int (-adjust_bytes);
      else
        *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
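/* For illustration, given a typical layout of

     struct S { char a; int b : 7; int c : 9; char d; };

   b and c share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to
   either of them may touch bits [8, 23] of the struct but must not
   touch a or d; the returned [*BITSTART, *BITEND] expresses exactly
   that window.  */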
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
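/* For illustration: a non-addressable scalar local such as

     int x;       (kept in a pseudo register)

   can still be referenced through a MEM_REF of &x after folding; such
   a reference names an object with no memory location, so stores
   through it must be handled specially rather than emitted as memory
   accesses.  */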
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
          < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
           != CODE_FOR_nothing)
          || targetm.slow_unaligned_access (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
        reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
        {
          struct expand_operand ops[2];

          create_fixed_operand (&ops[0], mem);
          create_input_operand (&ops[1], reg, mode);
          /* The movmisalign<mode> pattern cannot fail, else the assignment
             would silently be omitted.  */
          expand_insn (icode, 2, ops);
        }
      else
        store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
                         false);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
          && (REF_REVERSE_STORAGE_ORDER (to)
              || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      poly_int64 bitsize, bitpos;
      poly_uint64 bitregion_start = 0;
      poly_uint64 bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (maybe_lt (bitpos, 0))
        {
          gcc_assert (offset == NULL_TREE);
          offset = size_int (bits_to_bytes_round_down (bitpos));
          bitpos = num_trailing_bits (bitpos);
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
        get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
         However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
         BITSIZE are not byte-aligned, there is no need to limit the range
         we can access.  This can occur with packed structures in Ada.  */
      else if (maybe_gt (bitsize, 0)
               && multiple_p (bitsize, BITS_PER_UNIT)
               && multiple_p (bitpos, BITS_PER_UNIT))
        {
          bitregion_start = bitpos;
          bitregion_end = bitpos + bitsize - 1;
        }

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
         field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
        {
          if (mode1 != VOIDmode)
            to_rtx = adjust_address (to_rtx, mode1, 0);
          else if (GET_MODE (to_rtx) == VOIDmode)
            to_rtx = adjust_address (to_rtx, BLKmode, 0);
        }

      if (offset != 0)
        {
          machine_mode address_mode;
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
          address_mode = get_address_mode (to_rtx);
          if (GET_MODE (offset_rtx) != address_mode)
            {
              /* We cannot be sure that the RTL in offset_rtx is valid outside
                 of a memory address context, so force it into a register
                 before attempting to convert it to the desired mode.  */
              offset_rtx = force_operand (offset_rtx, NULL_RTX);
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
            }

          /* If we have an expression in OFFSET_RTX and a non-zero
             byte offset in BITPOS, adding the byte offset before the
             OFFSET_RTX results in better intermediate code, which makes
             later rtl optimization passes perform better.

             We prefer intermediate code like this:

             r124:DI=r123:DI+0x18
             [r124:DI]=r121:DI

             ... instead of ...

             r124:DI=r123:DI+0x10
             r124:DI=r124:DI+0x8
             [r124:DI]=r121:DI

             This is only done for aligned data values, as these can
             be expected to result in single move instructions.  */
          poly_int64 bytepos;
          if (mode1 != VOIDmode
              && maybe_ne (bitpos, 0)
              && maybe_gt (bitsize, 0)
              && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
              && multiple_p (bitpos, bitsize)
              && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
              && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bytepos);
              bitregion_start = 0;
              if (known_ge (bitregion_end, poly_uint64 (bitpos)))
                bitregion_end -= bitpos;
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* No action is needed if the target is not a memory and the field
         lies completely outside that target.  This can occur if the source
         code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
          && GET_MODE (to_rtx) != BLKmode
          && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
        {
          expand_normal (from);
          result = NULL;
        }
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
        {
          unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
          if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
              && COMPLEX_MODE_P (GET_MODE (to_rtx))
              && known_eq (bitpos, 0)
              && known_eq (bitsize, mode_bitsize))
            result = store_expr (from, to_rtx, false, nontemporal, reversep);
          else if (known_eq (bitsize, mode_bitsize / 2)
                   && (known_eq (bitpos, 0)
                       || known_eq (bitpos, mode_bitsize / 2)))
            result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
                                 false, nontemporal, reversep);
          else if (known_le (bitpos + bitsize, mode_bitsize / 2))
            result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
          else if (known_ge (bitpos, mode_bitsize / 2))
            result = store_field (XEXP (to_rtx, 1), bitsize,
                                  bitpos - mode_bitsize / 2,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
          else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
            {
              result = expand_normal (from);
              if (GET_CODE (result) == CONCAT)
                {
                  machine_mode to_mode = GET_MODE_INNER (GET_MODE (to_rtx));
                  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
                  rtx from_real
                    = simplify_gen_subreg (to_mode, XEXP (result, 0),
                                           from_mode, 0);
                  rtx from_imag
                    = simplify_gen_subreg (to_mode, XEXP (result, 1),
                                           from_mode, 0);
                  if (!from_real || !from_imag)
                    goto concat_store_slow;
                  emit_move_insn (XEXP (to_rtx, 0), from_real);
                  emit_move_insn (XEXP (to_rtx, 1), from_imag);
                }
              else
                {
                  rtx from_rtx
                    = simplify_gen_subreg (GET_MODE (to_rtx), result,
                                           TYPE_MODE (TREE_TYPE (from)), 0);
                  if (from_rtx)
                    {
                      emit_move_insn (XEXP (to_rtx, 0),
                                      read_complex_part (from_rtx, false));
                      emit_move_insn (XEXP (to_rtx, 1),
                                      read_complex_part (from_rtx, true));
                    }
                  else
                    {
                      machine_mode to_mode
                        = GET_MODE_INNER (GET_MODE (to_rtx));
                      rtx from_real
                        = simplify_gen_subreg (to_mode, result,
                                               TYPE_MODE (TREE_TYPE (from)),
                                               0);
                      rtx from_imag
                        = simplify_gen_subreg (to_mode, result,
                                               TYPE_MODE (TREE_TYPE (from)),
                                               GET_MODE_SIZE (to_mode));
                      if (!from_real || !from_imag)
                        goto concat_store_slow;
                      emit_move_insn (XEXP (to_rtx, 0), from_real);
                      emit_move_insn (XEXP (to_rtx, 1), from_imag);
                    }
                }
            }
          else
            {
            concat_store_slow:;
              rtx temp = assign_stack_temp (GET_MODE (to_rtx),
                                            GET_MODE_SIZE (GET_MODE (to_rtx)));
              write_complex_part (temp, XEXP (to_rtx, 0), false);
              write_complex_part (temp, XEXP (to_rtx, 1), true);
              result = store_field (temp, bitsize, bitpos,
                                    bitregion_start, bitregion_end,
                                    mode1, from, get_alias_set (to),
                                    nontemporal, reversep);
              emit_move_insn (XEXP (to_rtx, 0),
                              read_complex_part (temp, false));
              emit_move_insn (XEXP (to_rtx, 1),
                              read_complex_part (temp, true));
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos,
                                               bitregion_start, bitregion_end,
                                               mode1, to_rtx, to, from,
                                               reversep))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
        }

      if (result)
        preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
              || TREE_CODE (to) == PARM_DECL
              || TREE_CODE (to) == RESULT_DECL)
             && REG_P (DECL_RTL (to)))
            || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;
      rtx bounds;

      push_temp_slots ();
      value = expand_normal (from);

      /* Split value and bounds to store them separately.  */
      chkp_split_slot (value, &value, &bounds);

      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        {
          if (GET_CODE (value) == PARALLEL)
            emit_group_move (to_rtx, value);
          else
            emit_group_load (to_rtx, value, TREE_TYPE (from),
                             int_size_in_bytes (TREE_TYPE (from)));
        }
      else if (GET_CODE (value) == PARALLEL)
        emit_group_store (to_rtx, value, TREE_TYPE (from),
                          int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        {
          /* Handle calls that return BLKmode values in registers.  */
          if (REG_P (value))
            copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
          else
            emit_block_move (to_rtx, value, expr_size (from),
                             BLOCK_OP_NORMAL);
        }
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address_addr_space
              (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
               TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

          emit_move_insn (to_rtx, value);
        }

      /* Store bounds if required.  */
      if (bounds
          && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
        {
          gcc_assert (MEM_P (to_rtx));
          chkp_emit_bounds_store (bounds, value, to_rtx);
        }

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
         this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
          && TYPE_MODE (TREE_TYPE (from)) == BLKmode
          && TREE_CODE (from) != CALL_EXPR)
        temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
        temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        {
          if (GET_CODE (temp) == PARALLEL)
            emit_group_move (to_rtx, temp);
          else
            emit_group_load (to_rtx, temp, TREE_TYPE (from),
                             int_size_in_bytes (TREE_TYPE (from)));
        }
      else if (temp)
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
         (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem, because C
   has no operators that could combine two different assignments into
   the same BLKmode object with different values with no sequence
   point.  Will other languages need this to be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.

   If BTARGET is not NULL then computed bounds of EXP are
   associated with BTARGET.  */

rtx
store_expr_with_bounds (tree exp, rtx target, int call_param_p,
                        bool nontemporal, bool reverse, tree btarget)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
                                     call_param_p, nontemporal, reverse,
                                     btarget);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
                 profile_probability::uninitialized ());
      store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
                              nontemporal, reverse, btarget);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
                              nontemporal, reverse, btarget);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
      scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
      scalar_int_mode inner_mode = subreg_promoted_mode (target);

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && GET_MODE_PRECISION (outer_mode)
             == TYPE_PRECISION (TREE_TYPE (exp)))
        {
          if (!SUBREG_CHECK_PROMOTED_SIGN (target,
                                           TYPE_UNSIGNED (TREE_TYPE (exp))))
            {
              /* Some types, e.g. Fortran's logical*4, won't have a signed
                 version, so use the mode instead.  */
              tree ntype
                = (signed_or_unsigned_type_for
                   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
              if (ntype == NULL)
                ntype = lang_hooks.types.type_for_mode
                  (TYPE_MODE (TREE_TYPE (exp)),
                   SUBREG_PROMOTED_SIGN (target));

              exp = fold_convert_loc (loc, ntype, exp);
            }

          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
                                  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
                                  exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
        {
          rtx bounds;
          chkp_split_slot (temp, &temp, &bounds);
          if (bounds && btarget)
            {
              gcc_assert (TREE_CODE (btarget) == SSA_NAME);
              rtx tmp = targetm.calls.load_returned_bounds (bounds);
              chkp_set_rtl_bounds (btarget, tmp);
            }
        }

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_SIGN (target));
          temp = convert_modes (inner_mode, outer_mode, temp,
                                SUBREG_PROMOTED_SIGN (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
            || (TREE_CODE (exp) == MEM_REF
                && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == STRING_CST
                && integer_zerop (TREE_OPERAND (exp, 1))))
           && !nontemporal && !call_param_p
           && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
                 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
        goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
        goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
        goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
          && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
        {
          str_copy_len += STORE_MAX_PIECES - 1;
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
        }
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
                                CONST_CAST (char *, TREE_STRING_POINTER (str)),
                                MEM_ALIGN (target), false))
        goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
                                  str_copy_len, builtin_strncpy_read_str,
                                  CONST_CAST (char *,
                                              TREE_STRING_POINTER (str)),
                                  MEM_ALIGN (target), false,
                                  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
                       GEN_INT (exp_len - str_copy_len),
                       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
         value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl, false);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
        {
          rtx bounds;
          chkp_split_slot (temp, &temp, &bounds);
          if (bounds && btarget)
            {
              gcc_assert (TREE_CODE (btarget) == SSA_NAME);
              rtx tmp = targetm.calls.load_returned_bounds (bounds);
              chkp_set_rtl_bounds (btarget, tmp);
            }
        }
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    {
      if (GET_MODE_CLASS (GET_MODE (target))
          != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
          && GET_MODE_BITSIZE (GET_MODE (target))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))))
        {
          rtx t = simplify_gen_subreg (GET_MODE (target), temp,
                                       TYPE_MODE (TREE_TYPE (exp)), 0);
          if (t)
            temp = t;
        }
      if (GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                              temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl
           && rtx_equal_p (alt_rtl, target)
           && !side_effects_p (alt_rtl)
           && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
        {
          if (GET_MODE (target) == BLKmode)
            {
              /* Handle calls that return BLKmode values in registers.  */
              if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
                copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
              else
                store_bit_field (target,
                                 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
                                 0, 0, 0, GET_MODE (temp), temp, reverse);
            }
          else
            convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (CONST_INT_P (size)
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              machine_mode pointer_mode
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
              machine_mode address_mode = get_address_mode (target);

              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop_loc (loc, MIN_EXPR,
                                  make_tree (sizetype, size),
                                  size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx_code_label *label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in pointer_mode.  */
              if (CONST_INT_P (copy_size_rtx))
                {
                  size = plus_constant (address_mode, size,
                                        -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  if (GET_MODE (copy_size_rtx) != address_mode)
                    copy_size_rtx = convert_to_mode (address_mode,
                                                     copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        {
          if (GET_CODE (temp) == PARALLEL)
            emit_group_move (target, temp);
          else
            emit_group_load (target, temp, TREE_TYPE (exp),
                             int_size_in_bytes (TREE_TYPE (exp)));
        }
      else if (GET_CODE (temp) == PARALLEL)
        emit_group_store (target, temp, TREE_TYPE (exp),
                          int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
        ;
      else
        {
          if (reverse)
            temp = flip_storage_order (GET_MODE (target), temp);
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
            bool reverse)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
                                 reverse, NULL);
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
          && TREE_CODE (tf) == ARRAY_TYPE
          && TYPE_DOMAIN (tf)
          && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
          && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
          && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
          && int_size_in_bytes (type) >= 0);
}
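/* For illustration: in

     struct msg { int len; char data[]; };

   DATA is a flexible array member: it is the last field, its domain
   has a zero lower bound and no upper bound, and the enclosing struct
   still has a known size, so initializers are not expected to provide
   a value for it.  */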
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree nelts;

        nelts = array_type_nelts (type);
        if (nelts && tree_fits_uhwi_p (nelts))
          {
            unsigned HOST_WIDE_INT n;

            n = tree_to_uhwi (nelts) + 1;
            if (n == 0 || for_ctor_p)
              return n;
            else
              return n * count_type_elements (TREE_TYPE (type), false);
          }
        return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
        unsigned HOST_WIDE_INT n;
        tree f;

        n = 0;
        for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              if (!for_ctor_p)
                n += count_type_elements (TREE_TYPE (f), false);
              else if (!flexible_array_member_p (f, type))
                /* Don't count flexible arrays, which are not supposed
                   to be initialized.  */
                n += 1;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f;
        HOST_WIDE_INT n, m;

        gcc_assert (!for_ctor_p);
        /* Estimate the number of scalars in each field and pick the
           maximum.  Other estimates would do instead; the idea is simply
           to make sure that the estimate is not sensitive to the ordering
           of the fields.  */
        n = 1;
        for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              m = count_type_elements (TREE_TYPE (f), false);
              /* If the field doesn't span the whole union, add an extra
                 scalar for the rest.  */
              if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
                                    TYPE_SIZE (type)) != 1)
                m++;
              if (n < m)
                n = m;
            }
        return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
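/* For illustration: for

     struct pt { int x; int y; } a[3];

   count_type_elements (TYPE, false) gives 6, an estimate of the
   scalars in the whole array, while count_type_elements (TYPE, true)
   gives 3, the number of top-level elements a complete constructor
   for the array must have.  */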
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
            mult = (tree_to_uhwi (hi_index)
                    - tree_to_uhwi (lo_index) + 1);
        }
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, ic = 0;

            bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
                                                           p_complete);

            nz_elts += mult * nz;
            init_elts += mult * ic;

            if (const_from_elts_p && const_p)
              const_p = const_elt_p;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          init_elts += mult;
          break;

        case STRING_CST:
          nz_elts += mult * TREE_STRING_LENGTH (value);
          init_elts += mult * TREE_STRING_LENGTH (value);
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          init_elts += mult;
          break;

        case VECTOR_CST:
          {
            unsigned i;
            for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
              {
                tree v = VECTOR_CST_ELT (value, i);
                if (!initializer_zerop (v))
                  nz_elts += mult;
                init_elts += mult;
              }
          }
          break;

        default:
          {
            HOST_WIDE_INT tc = count_type_elements (elt_type, false);
            nz_elts += mult * tc;
            init_elts += mult * tc;

            if (const_from_elts_p && const_p)
              const_p
                = initializer_constant_valid_p (value,
                                                elt_type,
                                                TYPE_REVERSE_STORAGE_ORDER
                                                (TREE_TYPE (ctor)))
                  != NULL_TREE;
          }
          break;
        }
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
                                                num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
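
/* For example, for the initializer

     int a[4] = { 1, 0, 2, 0 };

   and assuming all four elements are represented explicitly in the
   CONSTRUCTOR, this should produce *P_NZ_ELTS == 2, *P_INIT_ELTS == 4
   and *P_COMPLETE == true.  */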
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
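
/* E.g. for a union initialized through one member, as in

     union u { char c; int i; };
     union u v = { .i = 42 };

   the single initialized element covers every byte exactly when its
   size equals the union's size, which is what the TYPE_SIZE
   comparison above checks.  */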
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
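
/* E.g. for

     int a[8] = { 1 };

   the CONSTRUCTOR mentions a single element, so it is incomplete and
   should count as mostly zeros: the remaining seven elements are
   implicitly zero-initialized.  */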
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
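
/* E.g. a CONSTRUCTOR for

     int a[8] = { 0 };

   has no nonzero elements, so it qualifies, and expand_constructor
   below can then clear the target wholesale with clear_storage
   instead of storing element by element.  */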
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
			 poly_uint64 bitregion_start,
			 poly_uint64 bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
      && maybe_ne (bitsize, 0U)
      && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (known_eq (bitpos, 0) || MEM_P (target)))
    {
      if (MEM_P (target))
	{
	  machine_mode target_mode = GET_MODE (target);
	  if (target_mode != BLKmode
	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
	    target_mode = BLKmode;
	  target = adjust_address (target, target_mode, bytepos);
	}

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bytesize, reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (known_eq (size, 0) || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (known_size_p (size)
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
	  {
	    clear_storage (target, gen_int_mode (size, Pmode),
			   BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      gcc_unreachable ();

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = NULL_TREE;
	      }
	    else
	      gcc_unreachable ();

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && maybe_gt (size, 0))
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 exact_div (bitsize, BITS_PER_UNIT),
					 reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	insn_code icode = CODE_FOR_nothing;
	tree elt;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;
	machine_mode mode = GET_MODE (target);

	gcc_assert (eltmode != BLKmode);

	/* Try using vec_duplicate_optab for uniform vectors.  */
	if (!TREE_SIDE_EFFECTS (exp)
	    && VECTOR_MODE_P (mode)
	    && eltmode == GET_MODE_INNER (mode)
	    && ((icode = optab_handler (vec_duplicate_optab, mode))
		!= CODE_FOR_nothing)
	    && (elt = uniform_vector_p (exp)))
	  {
	    struct expand_operand ops[2];
	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
	    expand_insn (icode, 2, ops);
	    if (!rtx_equal_p (target, ops[0].value))
	      emit_move_insn (target, ops[0].value);
	    break;
	  }

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (mode))
	  {
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (CONSTRUCTOR_NELTS (exp)
			    * TYPE_VECTOR_SUBPARTS (etype)
			    == n_elts);
		emode = TYPE_MODE (etype);
	      }
	    icode = convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && maybe_gt (size, 0) && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (mode));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (mode));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (mode, vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  Don't do that for zero sized addressable lhs of
     calls.  */
  if (known_eq (bitsize, 0)
      && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	  || TREE_CODE (exp) != CALL_EXPR))
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (known_eq (bitpos, 0));
      return store_expr (exp, target, 0, nontemporal, reverse);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  poly_int64 decl_bitsize;
  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
	      || !multiple_p (bitpos, BITS_PER_UNIT)))
      || (known_size_p (bitsize)
	  && mode != BLKmode
	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (known_size_p (bitsize)
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
		       bitsize)
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && multiple_p (bitsize, BITS_PER_UNIT))
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || !multiple_p (bitsize, BITS_PER_UNIT)
	      || !multiple_p (bitpos, BITS_PER_UNIT)
	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
				   &decl_bitsize)
	      || maybe_ne (decl_bitsize, bitsize)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (known_size_p (bitsize)
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && known_eq (bitsize, TYPE_PRECISION (type)))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type)
		  && known_ge (TYPE_PRECISION (type), bitsize))
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* We don't support variable-sized BLKmode bitfields, since our
	 handling of BLKmode is bound up with the ability to break
	 things into words.  */
      gcc_assert (mode != BLKmode || bitsize.is_constant ());

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  scalar_int_mode temp_mode
	    = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && is_int_mode (GET_MODE (temp), &temp_mode))
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (temp_mode, temp);

	  gcc_checking_assert (known_le (bitsize, size));
	  if (maybe_lt (bitsize, size)
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      /* Use of to_constant for BLKmode was checked above.  */
	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && multiple_p (bitpos, BITS_PER_UNIT)
		  && multiple_p (bitsize, BITS_PER_UNIT))))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp));
	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);

	  target = adjust_address (target, VOIDmode, bytepos);
	  emit_block_move (target, temp,
			   gen_int_mode (bytesize, Pmode),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
	{
	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode,
				   exact_div (bitpos, BITS_PER_UNIT));

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
	{
	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
	  return to_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, poly_int64_pod *pbitsize,
		     poly_int64_pod *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  poly_offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   field's size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	{
	  mode = DECL_MODE (field);
	  /* For vector fields re-check the target flags, as DECL_MODE
	     could have been set with different target flags than
	     the current function has.  */
	  if (mode == BLKmode
	      && VECTOR_TYPE_P (TREE_TYPE (field))
	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
	    mode = TYPE_MODE (TREE_TYPE (field));
	}
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  poly_offset_int boff = mem_ref_offset (exp);
		  boff <<= LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (poly_int_tree_p (offset))
    {
      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
				      TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (tem.to_shwi (pbitpos))
	*poffset = offset = NULL_TREE;
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
	{
	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
	  offset = size_binop (PLUS_EXPR, offset,
			       build_int_cst (sizetype, bytes.force_shwi ()));
	}

      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && multiple_p (*pbitpos, BITS_PER_UNIT)
      && multiple_p (*pbitsize, BITS_PER_UNIT))
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
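
/* For instance, when storing to S.I in the hypothetical

     struct __attribute__ ((packed)) { char c; int i; } s;

   the COMPONENT_REF case should combine DECL_ALIGN of the packed
   field (8 bits) with the alignment of S via MIN, so the store is not
   assumed to be int-aligned.  */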
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
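
/* For example, for the constant 48 tree_ctz reports 4 trailing zero
   bits, so the result is 1 << 4 == 16, the largest power of two that
   divides 48 (the result is capped at BIGGEST_ALIGNMENT).  */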
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
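
/* E.g. LT_EXPR becomes LTU when UNSIGNEDP and LT otherwise, so a
   comparison of two unsigned values uses the unsigned rtl codes while
   signed operands keep the signed ones; the UN* tree codes map to the
   corresponding unordered rtl codes independently of signedness.  */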
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  poly_int64 bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (maybe_ne (bitpos, 0))
    {
      /* Someone beforehand should have rejected taking the address
	 of an object that isn't byte-aligned.  */
      poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bytepos);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
                       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  scalar_int_mode new_tmode = (tmode == pointer_mode
                               ? pointer_mode
                               : address_mode);

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    new_tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
}
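/* Added illustration (editorial commentary, not part of the original source):
   for a cast like "(short) &a" the requested TMODE (HImode) matches neither
   pointer_mode nor address_mode, so NEW_TMODE falls back to address_mode,
   the address is computed there, and narrowing to the odd TMODE is left to
   the caller; only when TMODE really is the pointer mode is it honored.  */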
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
                    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
            && ! (target != 0 && safe_from_p (target, exp, 1)))
           || TREE_ADDRESSABLE (exp)
           || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
               && (! can_move_by_pieces
                   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
                    TYPE_ALIGN (type)))
               && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
          && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
        return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
        return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
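/* Added illustration (editorial commentary, not part of the original source):
   for an aggregate initializer that is entirely zero, e.g. roughly

     struct S { int a[64]; } t = { 0 };   // all_zeros_p is true

   the BLKmode constructor takes the first early-out above and becomes one
   clear_storage (a memset-like block clear) of TARGET instead of a constant
   pool temporary plus an element-by-element copy.  */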
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
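/* Added illustration (editorial commentary, not part of the original source):
   under EXPAND_SUM, expanding "p + i*4 + 12" may legitimately return the
   un-forced form

     (plus:P (plus:P (reg:P p) (mult:P (reg:P i) (const_int 4)))
             (const_int 12))

   so the caller (typically address legitimization) can fold it into an
   addressing mode, whereas EXPAND_NORMAL would emit mul/add insns and
   return a pseudo holding the sum.  */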
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl,
                  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
                            inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
                              tree treeop1 ATTRIBUTE_UNUSED,
                              tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp, insn;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;

  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
        return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  expanding_cond_expr_using_cmove = true;
  start_sequence ();
  expand_operands (treeop1, treeop2,
                   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
        comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
                                op00, op01, comparison_mode,
                                op1, op2, mode,
                                unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}
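/* Added illustration (editorial commentary, not part of the original source):
   for "x = a < b ? c : d" on a target with conditional moves, the function
   above expands, inside a detached sequence, roughly

     temp = c / d via expand_operands;
     LT comparison of a, b as op00/op01;
     temp = (a < b) ? op1 : op2 via emit_conditional_move;

   and only commits the sequence if emit_conditional_move succeeded,
   otherwise the caller falls back to compare-and-jump expansion.  */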
rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
                    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  scalar_int_mode int_mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
                      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
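/* Added illustration (editorial commentary, not part of the original source):
   for a bit-field type such as "unsigned int x : 3", TYPE_PRECISION is 3
   while the type's mode (SImode) has precision 32, so reduce_bit_field is
   set and REDUCE_BIT_FIELD masks arithmetic results back to 3 bits -- e.g.
   7 + 1 reduces to 0 rather than 8.  */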
  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (treeop0);

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (treeop0, target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, type, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (treeop0,
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM,
                        false, TYPE_REVERSE_STORAGE_ORDER (type));

          else
            {
              gcc_assert (REG_P (target)
                          && !TYPE_REVERSE_STORAGE_ORDER (type));

              /* Store this field into a union of the proper type.  */
              poly_uint64 op0_size
                = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
              poly_uint64 union_size = GET_MODE_BITSIZE (mode);
              store_field (target,
                           /* The conversion must be constructed so that
                              we know at compile time how many bits
                              to preserve.  */
                           ordered_min (op0_size, union_size),
                           0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
                           false, false);
            }

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
        {
          op0 = expand_expr (treeop0, target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (treeop0, NULL_RTX, mode,
                         modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (treeop0);
          machine_mode inner_mode = GET_MODE (op0);

          if (inner_mode == VOIDmode)
            inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = lowpart_subreg (mode, op0, inner_mode);
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
                             ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (treeop0)));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
        tree treeop0_type = TREE_TYPE (treeop0);

        gcc_assert (POINTER_TYPE_P (type));
        gcc_assert (POINTER_TYPE_P (treeop0_type));

        addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
        addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

        /* Conversions between pointers to the same address space should
           have been implemented via CONVERT_EXPR / NOP_EXPR.  */
        gcc_assert (as_to != as_from);

        op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

        /* Ask target code to handle conversion between pointers
           to overlapping address spaces.  */
        if (targetm.addr_space.subset_p (as_to, as_from)
            || targetm.addr_space.subset_p (as_from, as_to))
          {
            op0 = targetm.addr_space.convert (op0, treeop0_type, type);
            gcc_assert (op0);
            return op0;
          }

        /* For disjoint address spaces, converting anything but a null
           pointer invokes undefined behavior.  We truncate or extend the
           value as if we'd converted via integers, which handles 0 as
           required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
        const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
        op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
                             op0, POINTERS_EXTEND_UNSIGNED);
        return op0;
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
         expand is able to handle this correctly and get the correct result out
         of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
         if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type,
                                    fold_convert_loc (loc, ssizetype,
                                                      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
         offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type, treeop1);
      /* FALLTHRU */

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
          && VAR_P (treeop1)
          && (DECL_RTL (treeop1) == frame_pointer_rtx
              || DECL_RTL (treeop1) == stack_pointer_rtx
              || DECL_RTL (treeop1) == arg_pointer_rtx))
        {
          gcc_unreachable ();
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (treeop0) == INTEGER_CST
              && HWI_COMPUTABLE_MODE_P (mode)
              && TREE_CONSTANT (treeop1))
            {
              rtx constant_part;
              HOST_WIDE_INT wc;
              machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

              op1 = expand_expr (treeop1, subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use wi::shwi to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              wc = TREE_INT_CST_LOW (treeop0);
              constant_part =
                immed_wide_int_const (wi::shwi (wc, wmode), wmode);
              op1 = plus_constant (mode, op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }

          else if (TREE_CODE (treeop1) == INTEGER_CST
                   && HWI_COMPUTABLE_MODE_P (mode)
                   && TREE_CONSTANT (treeop0))
            {
              rtx constant_part;
              HOST_WIDE_INT wc;
              machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

              op0 = expand_expr (treeop0, subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (treeop1, NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use wi::shwi to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              wc = TREE_INT_CST_LOW (treeop1);
              constant_part
                = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
              op0 = plus_constant (mode, op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }

      /* Use TER to expand pointer addition of a negated value
         as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
           || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
               && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
          && TREE_CODE (treeop1) == SSA_NAME
          && TYPE_MODE (TREE_TYPE (treeop0))
             == TYPE_MODE (TREE_TYPE (treeop1)))
        {
          gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
          if (def)
            {
              treeop1 = gimple_assign_rhs1 (def);
              code = MINUS_EXPR;
              goto do_minus;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
          && (modifier != EXPAND_SUM || mode != ptr_mode))
        {
          expand_operands (treeop0, treeop1,
                           subtarget, &op0, &op1, modifier);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
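      /* Added illustration (editorial commentary, not part of the original
         source): for "P = &arr[10]" with 4-byte elements, treeop0 is the
         INTEGER_CST 40, so the first branch above expands ARR under
         EXPAND_SUM and folds the constant in with plus_constant, yielding

           (const:P (plus:P (symbol_ref:P "arr") (const_int 40)))

         rather than an explicit add insn; force_operand only runs when the
         modifier does not permit returning a bare sum.  */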
    case POINTER_DIFF_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (treeop0)
          && really_constant_p (treeop1))
        {
          expand_operands (treeop0, treeop1,
                           NULL_RTX, &op0, &op1, modifier);
          return simplify_gen_binary (MINUS, mode, op0, op1);
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
          && (modifier != EXPAND_SUM || mode != ptr_mode))
        goto binop;

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
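      /* Added illustration (editorial commentary, not part of the original
         source): the A - const rewrite means "x - 5" becomes "x + (-5)", so
         a target needs only an add-immediate pattern; negate_rtx folds
         (const_int 5) to (const_int -5) at compile time and
         simplify_gen_binary emits the PLUS.  */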
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                          target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
         unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
          && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
              != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
        {
          machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
          this_optab = usmul_widen_optab;
          if (find_widening_optab_handler (this_optab, mode, innermode)
              != CODE_FOR_nothing)
            {
              if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
                expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                 EXPAND_NORMAL);
              else
                expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
                                 EXPAND_NORMAL);
              /* op0 and op1 might still be constant, despite the above
                 != INTEGER_CST check.  Handle it.  */
              if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                {
                  op0 = convert_modes (innermode, mode, op0, true);
                  op1 = convert_modes (innermode, mode, op1, false);
                  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                        target, unsignedp));
                }
              goto binop3;
            }
        }
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
                && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
               || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
                   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
        {
          tree op0type = TREE_TYPE (treeop0);
          machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (TREE_CODE (treeop0) != INTEGER_CST)
            {
              if (find_widening_optab_handler (this_optab, mode, innermode)
                  != CODE_FOR_nothing)
                {
                  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                   EXPAND_NORMAL);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    {
                     widen_mult_const:
                      op0 = convert_modes (innermode, mode, op0, zextend_p);
                      op1
                        = convert_modes (innermode, mode, op1,
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                            target,
                                                            unsignedp));
                    }
                  temp = expand_widening_mult (mode, op0, op1, target,
                                               unsignedp, this_optab);
                  return REDUCE_BIT_FIELD (temp);
                }
              if (find_widening_optab_handler (other_optab, mode, innermode)
                  != CODE_FOR_nothing
                  && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_normal (treeop0);
                  if (TREE_CODE (treeop1) == INTEGER_CST)
                    op1 = convert_modes (word_mode, mode,
                                         expand_normal (treeop1),
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                  else
                    op1 = expand_normal (treeop1);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    goto widen_mult_const;
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (word_mode, temp);
                  htem = expand_mult_highpart_adjust (word_mode, hipart,
                                                      op0, op1, hipart,
                                                      unsignedp);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case FMA_EXPR:
      {
        optab opt = fma_optab;
        gimple *def0, *def2;

        /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
           call.  */
        if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
          {
            tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
            tree call_expr;

            gcc_assert (fn != NULL_TREE);
            call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
            return expand_builtin (call_expr, target, subtarget, mode, false);
          }

        def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
        /* The multiplication is commutative - look at its 2nd operand
           if the first isn't fed by a negate.  */
        if (!def0)
          {
            def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
            /* Swap operands if the 2nd operand is fed by a negate.  */
            if (def0)
              std::swap (treeop0, treeop1);
          }
        def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

        op0 = op2 = NULL;

        if (def0 && def2
            && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
          {
            opt = fnms_optab;
            op0 = expand_normal (gimple_assign_rhs1 (def0));
            op2 = expand_normal (gimple_assign_rhs1 (def2));
          }
        else if (def0
                 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
          {
            opt = fnma_optab;
            op0 = expand_normal (gimple_assign_rhs1 (def0));
          }
        else if (def2
                 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
          {
            opt = fms_optab;
            op2 = expand_normal (gimple_assign_rhs1 (def2));
          }

        if (op0 == NULL)
          op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
        if (op2 == NULL)
          op2 = expand_normal (treeop2);
        op1 = expand_normal (treeop1);

        return expand_ternary_op (TYPE_MODE (type), opt,
                                  op0, op1, op2, target, 0);
      }
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_mult" doesn't support sat/no-sat fixed-point
         multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && tree_fits_shwi_p (treeop1))
        {
          tree exp1 = treeop1;

          op0 = expand_expr (treeop0, subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_to_shwi (exp1),
                                             TYPE_MODE (TREE_TYPE (exp1)))));
        }

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
        /* If this is a fixed-point operation, then we cannot use the code
           below because "expand_divmod" doesn't support sat/no-sat fixed-point
           divisions.  */
        if (ALL_FIXED_POINT_MODE_P (mode))
          goto binop;

        if (modifier == EXPAND_STACK_PARM)
          target = 0;
        /* Possible optimization: compute the dividend with EXPAND_SUM
           then if the divisor is constant can optimize the case
           where some terms of the dividend have coeffs divisible by it.  */
        expand_operands (treeop0, treeop1,
                         subtarget, &op0, &op1, EXPAND_NORMAL);
        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
                     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
        if (SCALAR_INT_MODE_P (mode)
            && optimize >= 2
            && get_range_pos_neg (treeop0) == 1
            && get_range_pos_neg (treeop1) == 1)
          {
            /* If both arguments are known to be positive when interpreted
               as signed, we can expand it as both signed and unsigned
               division or modulo.  Choose the cheaper sequence in that case.  */
            bool speed_p = optimize_insn_for_speed_p ();
            do_pending_stack_adjust ();
            start_sequence ();
            rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
            rtx_insn *uns_insns = get_insns ();
            end_sequence ();
            start_sequence ();
            rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
            rtx_insn *sgn_insns = get_insns ();
            end_sequence ();
            unsigned uns_cost = seq_cost (uns_insns, speed_p);
            unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

            /* If costs are the same then use as tie breaker the other
               other factor.  */
            if (uns_cost == sgn_cost)
              {
                uns_cost = seq_cost (uns_insns, !speed_p);
                sgn_cost = seq_cost (sgn_insns, !speed_p);
              }

            if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
              {
                emit_insn (uns_insns);
                return uns_ret;
              }
            emit_insn (sgn_insns);
            return sgn_ret;
          }
        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }
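      /* Added illustration (editorial commentary, not part of the original
         source): if both operands of "a / b" are known non-negative, the
         block above expands the division twice -- once unsigned, once
         signed -- into detached sequences, compares the two with seq_cost,
         and emits the cheaper; on many targets the unsigned form wins
         because it needs no sign-handling fixups around the hardware
         divide.  */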
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
           && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
        expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
        expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type,
                                               optab_default),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
                       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
         and similarly for MAX <x, y>.  */
      if (VECTOR_TYPE_P (type))
        {
          tree t0 = make_tree (type, op0);
          tree t1 = make_tree (type, op1);
          tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
                                    type, t0, t1);
          return expand_vec_cond_expr (type, comparison, t0, t1,
                                       original_target);
        }

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        std::swap (op0, op1);

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }

        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn != NULL_RTX)
              {
                rtx_insn *seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }

        if (target != op0)
          emit_move_insn (target, op0);

        lab = gen_label_rtx ();
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                 unsignedp, mode, NULL_RTX, NULL, lab,
                                 profile_probability::uninitialized ());
      }
      emit_move_insn (target, op1);
      emit_label (lab);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* In case we have to reduce the result to bitfield precision
         for unsigned bitfield expand this as XOR with a proper constant
         instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
        {
          int_mode = SCALAR_INT_TYPE_MODE (type);
          wide_int mask = wi::mask (TYPE_PRECISION (type),
                                    false, GET_MODE_PRECISION (int_mode));

          temp = expand_binop (int_mode, xor_optab, op0,
                               immed_wide_int_const (mask, int_mode),
                               target, 1, OPTAB_LIB_WIDEN);
        }
      else
        temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
                  || type_has_mode_precision_p (type));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_shift" doesn't support sat/no-sat fixed-point
         shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      /* Left shift optimization when shifting across word_size boundary.

         If mode == GET_MODE_WIDER_MODE (word_mode), then normally
         there isn't a native instruction to support this wide mode
         left shift.  Given the scenario below:

          Type A = (Type) B  << C

          | dest_high  |  dest_low  |

         if the shift amount C causes part of B to be shifted across the
         word size boundary, i.e. part of B is shifted into the high half
         of the destination register and part of B remains in the low
         half, then GCC will use the following left shift expand
         logic:

         1. Initialize dest_low to B.
         2. Initialize every bit of dest_high to the sign bit of B.
         3. Logical left shift dest_low by C bits to finalize dest_low.
            The value of dest_low before this shift is kept in a temp D.
         4. Logical left shift dest_high by C.
         5. Logical right shift D by (word_size - C).
         6. Or the results of 4 and 5 to finalize dest_high.

         While, by checking gimple statements, if operand B is
         coming from a signed extension, then we can simplify the above
         expand logic into:

              1. dest_high = src_low >> (word_size - C).
              2. dest_low = src_low << C.

         We can use one arithmetic right shift to finish all the
         purpose of steps 2, 4, 5, 6, thus we reduce the steps
         needed from 6 into 2.

         The case is similar for zero extension, except that we
         initialize dest_high to zero rather than copies of the sign
         bit from B.  Furthermore, we need to use a logical right shift
         in this case.

         The choice of sign-extension versus zero-extension is
         determined entirely by whether or not B is signed and is
         independent of the current setting of unsignedp.  */
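      /* Added worked example (editorial commentary, not part of the original
         source), assuming BITS_PER_WORD == 32 and a 64-bit int_mode: for
         "long long a = (long long) b << 20" with b a 32-bit signed value,
         C = 20 satisfies C < 32 and C + 32 >= 32, so part of B lands in each
         half and the simplified sequence is used:

           dest_high = src_low >> (32 - 20)   (arithmetic right shift)
           dest_low  = src_low << 20

         replacing the generic six-step double-word shift.  */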
      if (code == LSHIFT_EXPR
          && target
          && REG_P (target)
          && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
          && mode == int_mode
          && TREE_CONSTANT (treeop1)
          && TREE_CODE (treeop0) == SSA_NAME)
        {
          gimple *def = SSA_NAME_DEF_STMT (treeop0);
          if (is_gimple_assign (def)
              && gimple_assign_rhs_code (def) == NOP_EXPR)
            {
              scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
                (TREE_TYPE (gimple_assign_rhs1 (def)));

              if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
                  && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
                  && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
                      >= GET_MODE_BITSIZE (word_mode)))
                {
                  rtx_insn *seq, *seq_old;
                  poly_uint64 high_off = subreg_highpart_offset (word_mode,
                                                                 int_mode);
                  bool extend_unsigned
                    = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
                  rtx low = lowpart_subreg (word_mode, op0, int_mode);
                  rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
                  rtx dest_high = simplify_gen_subreg (word_mode, target,
                                                       int_mode, high_off);
                  HOST_WIDE_INT ramount = (BITS_PER_WORD
                                           - TREE_INT_CST_LOW (treeop1));
                  tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

                  start_sequence ();
                  /* dest_high = src_low >> (word_size - C).  */
                  temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
                                                rshift, dest_high,
                                                extend_unsigned);
                  if (temp != dest_high)
                    emit_move_insn (dest_high, temp);

                  /* dest_low = src_low << C.  */
                  temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
                                                treeop1, dest_low, unsignedp);
                  if (temp != dest_low)
                    emit_move_insn (dest_low, temp);

                  seq = get_insns ();
                  end_sequence ();
                  temp = target;

                  if (have_insn_for (ASHIFT, int_mode))
                    {
                      bool speed_p = optimize_insn_for_speed_p ();
                      start_sequence ();
                      rtx ret_old = expand_variable_shift (code, int_mode,
                                                           op0, treeop1,
                                                           target, unsignedp);

                      seq_old = get_insns ();
                      end_sequence ();
                      if (seq_cost (seq, speed_p)
                          >= seq_cost (seq_old, speed_p))
                        {
                          seq = seq_old;
                          temp = ret_old;
                        }
                    }
                  emit_insn (seq);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }

      if (temp == NULL_RTX)
        temp = expand_variable_shift (code, mode, op0, treeop1, target,
                                      unsignedp);
      if (code == LSHIFT_EXPR)
        temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
        temp = do_store_flag (ops,
                              modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                              tmode != VOIDmode ? tmode : mode);
        if (temp)
          return temp;

        /* Use a compare and a jump for BLKmode comparisons, or for function
           type comparisons if have_canonicalize_funcptr_for_compare.  */

        if ((target == 0
             || modifier == EXPAND_STACK_PARM
             || ! safe_from_p (target, treeop0, 1)
             || ! safe_from_p (target, treeop1, 1)
             /* Make sure we don't have a hard reg (such as function's return
                value) live across basic blocks, if not optimizing.  */
             || (!optimize && REG_P (target)
                 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        emit_move_insn (target, const0_rtx);

        rtx_code_label *lab1 = gen_label_rtx ();
        jumpifnot_1 (code, treeop0, treeop1, lab1,
                     profile_probability::uninitialized ());

        if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
          emit_move_insn (target, constm1_rtx);
        else
          emit_move_insn (target, const1_rtx);

        emit_label (lab1);
        return target;
      }
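      /* Added illustration (editorial commentary, not part of the original
         source): when do_store_flag cannot produce the flag value directly,
         the fallback above materializes roughly

           target = 0;  if (op0 <cmp> op1) target = 1;

         via a jump around a move; for a 1-bit signed result type the "true"
         value stored is -1 (constm1_rtx) instead of 1.  */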
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (type));
      else
        /* If target overlaps with op1, then either we need to force
           op1 into a pseudo (if target also overlaps with op0),
           or write the complex parts in reverse order.  */
        switch (GET_CODE (target))
          {
          case CONCAT:
            if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
              {
                if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
                  {
                  complex_expr_force_op1:
                    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
                    emit_move_insn (temp, op1);
                    op1 = temp;
                    break;
                  }
              complex_expr_swap_order:
                /* Move the imaginary (op1) and real (op0) parts to their
                   location.  */
                write_complex_part (target, op1, true);
                write_complex_part (target, op0, false);

                return target;
              }
            break;
          case MEM:
            temp = adjust_address_nv (target,
                                      GET_MODE_INNER (GET_MODE (target)), 0);
            if (reg_overlap_mentioned_p (temp, op1))
              {
                scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
                temp = adjust_address_nv (target, imode,
                                          GET_MODE_SIZE (imode));
                if (reg_overlap_mentioned_p (temp, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          default:
            if (reg_overlap_mentioned_p (target, op1))
              {
                if (reg_overlap_mentioned_p (target, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);
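      /* Added illustration (editorial commentary, not part of the original
         source): for target == (concat r i), expanding
         COMPLEX_EXPR <x, y> when Y overlaps R but X does not overlap I
         simply writes the imaginary part first (the swap-order path);
         only when both parts would be clobbered is OP1 forced into a
         fresh pseudo first.  */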
      return target;

    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
                                          target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        /* The signedness is determined from input operand.  */
        temp = expand_widen_pattern_expr
          (ops, op0, NULL_RTX, NULL_RTX,
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

        gcc_assert (temp);
        return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
                                          target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      {
        expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
        vec_perm_builder sel;
        if (TREE_CODE (treeop2) == VECTOR_CST
            && tree_to_vec_perm_builder (&sel, treeop2))
          {
            machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
            temp = expand_vec_perm_const (mode, op0, op1, sel,
                                          sel_mode, target);
          }
        else
          {
            op2 = expand_normal (treeop2);
            temp = expand_vec_perm_var (mode, op0, op1, op2, target);
          }
        gcc_assert (temp);
        return temp;
      }

    case DOT_PROD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }

    case SAD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }

    case REALIGN_LOAD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;

        this_optab = optab_for_tree_code (code, type, optab_default);
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                  target, unsignedp);
        gcc_assert (temp);
        return temp;
      }
    case COND_EXPR:
      {
        /* A COND_EXPR with its type being VOID_TYPE represents a
           conditional jump and is handled in
           expand_gimple_cond_expr.  */
        gcc_assert (!VOID_TYPE_P (type));

        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        gcc_assert (!TREE_ADDRESSABLE (type)
                    && !ignore
                    && TREE_TYPE (treeop1) != void_type_node
                    && TREE_TYPE (treeop2) != void_type_node);

        temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
        if (temp)
          return temp;

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (modifier != EXPAND_STACK_PARM
            && original_target
            && safe_from_p (original_target, treeop0, 1)
            && GET_MODE (original_target) == mode
            && !MEM_P (original_target))
          temp = original_target;
        else
          temp = assign_temp (type, 0, 1);

        do_pending_stack_adjust ();
        NO_DEFER_POP;
        rtx_code_label *lab0 = gen_label_rtx ();
        rtx_code_label *lab1 = gen_label_rtx ();
        jumpifnot (treeop0, lab0,
                   profile_probability::uninitialized ());
        store_expr (treeop1, temp,
                    modifier == EXPAND_STACK_PARM,
                    false, false);

        emit_jump_insn (targetm.gen_jump (lab1));
        emit_barrier ();
        emit_label (lab0);
        store_expr (treeop2, temp,
                    modifier == EXPAND_STACK_PARM,
                    false, false);

        emit_label (lab1);
        OK_DEFER_POP;
        return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;
    case VEC_DUPLICATE_EXPR:
      op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      target = expand_vector_broadcast (mode, op0);
      gcc_assert (target);
      return target;

    case VEC_SERIES_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
      return expand_vec_series_expr (mode, op0, op1, target);

    case BIT_INSERT_EXPR:
      {
        unsigned bitpos = tree_to_uhwi (treeop2);
        unsigned bitsize;
        if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
          bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
        else
          bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
        rtx op0 = expand_normal (treeop0);
        rtx op1 = expand_normal (treeop1);
        rtx dst = gen_reg_rtx (mode);
        emit_move_insn (dst, op0);
        store_bit_field (dst, bitsize, bitpos, 0, 0,
                         TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
        return dst;
      }

    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
                   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
          || is_gimple_val (gimple_assign_rhs1 (stmt)))
        return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl,
                    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
      case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
      case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
      case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
                      && INTEGRAL_TYPE_P (type)
                      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == BIT_FIELD_REF
          || code == COMPONENT_REF
          || code == INDIRECT_REF)
        return expand_expr (treeop0, const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }

    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
         out-of-ssa.  So fake instructions as if this was an access to the
         base variable.  This unnecessarily allocates a pseudo, see how we can
         reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
        {
          tree var = SSA_NAME_VAR (exp);
          if (var && DECL_RTL_SET_P (var))
            return DECL_RTL (var);
          return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
                              LAST_VIRTUAL_REGISTER + 1);
        }

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
          && modifier == EXPAND_INITIALIZER
          && !SSA_NAME_IS_DEFAULT_DEF (exp)
          && (optimize || !SSA_NAME_VAR (exp)
              || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
          && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
        g = SSA_NAME_DEF_STMT (exp);
      if (g)
        {
          rtx r;
          location_t saved_loc = curr_insn_location ();
          location_t loc = gimple_location (g);
          if (loc != UNKNOWN_LOCATION)
            set_curr_insn_location (loc);
          ops.code = gimple_assign_rhs_code (g);
          switch (get_gimple_rhs_class (ops.code))
            {
            case GIMPLE_TERNARY_RHS:
              ops.op2 = gimple_assign_rhs3 (g);
              /* Fallthru */
            case GIMPLE_BINARY_RHS:
              ops.op1 = gimple_assign_rhs2 (g);

              /* Try to expand conditional compare.  */
              if (targetm.gen_ccmp_first)
                {
                  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
                  r = expand_ccmp_expr (g, mode);
                  if (r)
                    break;
                }
              /* Fallthru */
            case GIMPLE_UNARY_RHS:
              ops.op0 = gimple_assign_rhs1 (g);
              ops.type = TREE_TYPE (gimple_assign_lhs (g));
              ops.location = loc;
              r = expand_expr_real_2 (&ops, target, tmode, modifier);
              break;
            case GIMPLE_SINGLE_RHS:
              {
                r = expand_expr_real (gimple_assign_rhs1 (g), target,
                                      tmode, modifier, alt_rtl,
                                      inner_reference_p);
                break;
              }
            default:
              gcc_unreachable ();
            }
          set_curr_insn_location (saved_loc);
          if (REG_P (r) && !REG_EXPR (r))
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
          return r;
        }

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* fall through */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);

      /* DECL_MODE might change when TYPE_MODE depends on attribute target
         settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
          && code == VAR_DECL && MEM_P (decl_rtl)
          && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
        decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
        decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
          && REG_P (decl_rtl)
          && HARD_REGISTER_P (decl_rtl))
        add_to_hard_reg_set (&crtl->asm_clobbers,
                             GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (exp)
        TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
         been lowered by this point.  */
      if (exp)
        context = decl_function_context (exp);
      gcc_assert (!exp
                  || SCOPE_FILE_SCOPE_P (context)
                  || context == current_function_decl
                  || TREE_STATIC (exp)
                  || DECL_EXTERNAL (exp)
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
                  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
        temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
         address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
        {
          if (alt_rtl)
            *alt_rtl = decl_rtl;
          decl_rtl = use_anchored_address (decl_rtl);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM
              && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
                                               : GET_MODE (decl_rtl),
                                               XEXP (decl_rtl, 0),
                                               MEM_ADDR_SPACE (decl_rtl)))
            temp = replace_equiv_address (decl_rtl,
                                          copy_rtx (XEXP (decl_rtl, 0)));
        }

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      if (exp)
        dmode = DECL_MODE (exp);
      else
        dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
         there are two cases: we are dealing with a BLKmode value
         that is returned in a register, or we are dealing with
         a promoted value.  In the latter case, return a SUBREG
         of the wanted mode, but mark it so that we know that it
         was already extended.  */
      if (REG_P (decl_rtl)
          && dmode != BLKmode
          && GET_MODE (decl_rtl) != dmode)
        {
          machine_mode pmode;

          /* Get the signedness to be used for this variable.  Ensure we get
             the same mode we got when the variable was declared.  */
          if (code != SSA_NAME)
            pmode = promote_decl_mode (exp, &unsignedp);
          else if ((g = SSA_NAME_DEF_STMT (ssa_name))
                   && gimple_code (g) == GIMPLE_CALL
                   && !gimple_call_internal_p (g))
            pmode = promote_function_mode (type, mode, &unsignedp,
                                           gimple_call_fntype (g),
                                           2);
          else
            pmode = promote_ssa_mode (ssa_name, &unsignedp);
          gcc_assert (GET_MODE (decl_rtl) == pmode);

          temp = gen_lowpart_SUBREG (mode, decl_rtl);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_SET (temp, unsignedp);
	/* Given that TYPE_PRECISION (type) is not always equal to
	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	   the former to the latter according to the signedness of the
	   type.  */
	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	temp = immed_wide_int_const
	  (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
	return temp;
      }

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	scalar_int_mode int_mode;
	if (is_int_mode (mode, &int_mode))
	  {
	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
	      return const_scalar_mask_from_tree (int_mode, exp);
	    else
	      {
		tree type_for_mode
		  = lang_hooks.types.type_for_mode (int_mode, 1);
		if (type_for_mode)
		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
					type_for_mode, exp);
	      }
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    unsigned i;
	    vec_alloc (v, VECTOR_CST_NELTS (exp));
	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }
    case CONST_DECL:
      if (modifier == EXPAND_WRITE)
	{
	  /* Writing into CONST_DECL is always invalid, but handle it
	     gracefully.  */
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
					 EXPAND_NORMAL, as);
	  op0 = memory_address_addr_space (mode, op0, as);
	  temp = gen_rtx_MEM (mode, op0);
	  set_mem_addr_space (temp, as);
	  return temp;
	}
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return const_double_from_real_value (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* fall through */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);

    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
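      /* For example, a 4-byte load known to be only 1-byte aligned is
	 routed through the target's movmisalign pattern when one is
	 provided; without one, TEMP is left as an ordinary MEM and a
	 regular move is emitted for it.  */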
    case MEM_REF:
      {
	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple *def_stmt;
	enum insn_code icode;
	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    poly_int64 offset = mem_ref_offset (exp).force_shwi ();
	    base = TREE_OPERAND (base, 0);
	    if (known_eq (offset, 0)
		&& !reverse
		&& tree_fits_uhwi_p (TYPE_SIZE (type))
		&& (GET_MODE_BITSIZE (DECL_MODE (base))
		    == tree_to_uhwi (TYPE_SIZE (type))))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && !inner_reference_p
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		struct expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (targetm.slow_unaligned_access (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode, false, alt_rtl);
	  }
	if (reverse
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  temp = flip_storage_order (mode, temp);
	return temp;
      }
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (init == NULL_TREE)
	      {
		tree value = build_zero_cst (type);
		if (TREE_CODE (value) == CONSTRUCTOR)
		  {
		    /* If VALUE is a CONSTRUCTOR, this optimization is only
		       useful if this doesn't store the CONSTRUCTOR into
		       memory.  If it does, it is more efficient to just
		       load the data from the array directly.  */
		    rtx ret = expand_constructor (value, target,
						  modifier, true);
		    if (ret == NULL_RTX)
		      value = NULL_TREE;
		  }

		if (value)
		  return expand_expr (value, target, tmode, modifier);
	      }
	    else if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (tree_fits_uhwi_p (index1)
		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    scalar_int_mode mode;

		    if (is_int_mode (TYPE_MODE (type), &mode)
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
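      /* For example, with optimization enabled and
	 static const char foo[] = "bar", the read foo[1] takes the
	 STRING_CST path above and is folded at expand time into the
	 QImode constant 'a' without emitting a load.  */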
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  scalar_int_mode field_mode;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || (is_int_mode (DECL_MODE (field), &field_mode)
			&& (GET_MODE_PRECISION (field_mode)
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    scalar_int_mode imode
		      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
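      /* For example, extracting a 3-bit unsigned bitfield from a known
	 CONSTRUCTOR masks the expanded value with (1 << 3) - 1 == 7,
	 while a signed field is shifted left and then right
	 arithmetically so that the sign bit gets replicated.  */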
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	machine_mode mode1, mode2;
	poly_int64 bitsize, bitpos, bytepos;
	tree offset;
	int reversep, volatilep = 0, must_force_mem;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0, memloc;
	bool clear_mem_expr = false;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr_real (tem,
			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				   != INTEGER_CST)
			       && modifier != EXPAND_STACK_PARM
			       ? target : NULL_RTX),
			      VOIDmode,
			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
			      NULL, true);

	/* If the field has a mode, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	if (MEM_P (op0))
	  {
	    if (mode1 != VOIDmode)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);
	  }

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || maybe_gt (bitpos + bitsize,
				       GET_MODE_BITSIZE (mode2)));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (known_eq (bitpos, 0)
		&& known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
		&& COMPLEX_MODE_P (mode1)
		&& COMPLEX_MODE_P (GET_MODE (op0))
		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
	      {
		if (reversep)
		  op0 = flip_storage_order (GET_MODE (op0), op0);
		if (mode1 != GET_MODE (op0))
		  {
		    rtx parts[2];
		    for (int i = 0; i < 2; i++)
		      {
			rtx op = read_complex_part (op0, i != 0);
			if (GET_CODE (op) == SUBREG)
			  op = force_reg (GET_MODE (op), op);
			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
						       op);
			if (temp)
			  op = temp;
			else
			  {
			    if (!REG_P (op) && !MEM_P (op))
			      op = force_reg (GET_MODE (op), op);
			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
			  }
			parts[i] = op;
		      }
		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
		  }
		return op0;
	      }
	    if (known_eq (bitpos, 0)
		&& known_eq (bitsize,
			     GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
		&& maybe_ne (bitsize, 0))
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (known_eq (bitpos,
			       GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
		     && known_eq (bitsize,
				  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
		     && maybe_ne (bitpos, 0)
		     && maybe_ne (bitsize, 0))
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }
	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    clear_mem_expr = true;
	  }

	if (offset)
	  {
	    machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      {
		/* We cannot be sure that the RTL in offset_rtx is valid outside
		   of a memory address context, so force it into a register
		   before attempting to convert it to the desired mode.  */
		offset_rtx = force_operand (offset_rtx, NULL_RTX);
		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	      }

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& maybe_ne (bitpos, 0)
		&& maybe_gt (bitsize, 0)
		&& multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
		&& multiple_p (bitpos, bitsize)
		&& multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bytepos);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0)
	    && known_eq (bitpos, 0)
	    && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((MEM_P (op0)
		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : targetm.slow_unaligned_access (mode1,
							  MEM_ALIGN (op0))))
		    || !multiple_p (bitpos, BITS_PER_UNIT)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (known_size_p (bitsize)
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		&& maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
			     bitsize)))
	  {
	    machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && multiple_p (bitpos, BITS_PER_UNIT)))
	      ext_mode = int_mode_for_size (bitsize, 1).else_blk ();

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (known_eq (bitsize, 0))
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target)));

		bytepos = exact_div (bitpos, BITS_PER_UNIT);
		poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
		emit_block_move (target,
				 adjust_address (op0, VOIDmode, bytepos),
				 gen_int_mode (bytesize, Pmode),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (known_eq (bitsize, 0))
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    /* If the result has a record type and the extraction is done in
	       an integral mode, then the field may be not aligned on a byte
	       boundary; in this case, if it has reverse storage order, it
	       needs to be extracted as a scalar field with reverse storage
	       order and put back into memory order afterwards.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode, reversep, alt_rtl);

	    /* If the result has a record type and the mode of OP0 is an
	       integral mode then, if BITSIZE is narrower than this mode
	       and this is for big-endian data, we must put the field
	       into the high-order bits.  And we must also put it back
	       into memory order if it has been previously reversed.  */
	    scalar_int_mode op0_mode;
	    if (TREE_CODE (type) == RECORD_TYPE
		&& is_int_mode (GET_MODE (op0), &op0_mode))
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);

		gcc_checking_assert (known_le (bitsize, size));
		if (maybe_lt (bitsize, size)
		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
		  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
				      size - bitsize, op0, 1);

		if (reversep)
		  op0 = flip_storage_order (op0_mode, op0);
	      }

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	bytepos = bits_to_bytes_round_down (bitpos);
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bytepos);
	else
	  op0 = adjust_address (op0, mode1, bytepos);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* Don't set memory attributes if the base expression is an
	   SSA_NAME that got expanded as a MEM.  In that case, we should
	   just honor its original memory attributes.  */
	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	/* If op0 is a temporary because the original expression was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;

	if (reversep
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  op0 = flip_storage_order (mode1, op0);

	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    if (CALL_WITH_BOUNDS_P (exp))
	      return expand_builtin_with_bounds (exp, target, subtarget,
						 tmode, ignore);
	    else
	      return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && poly_int_tree_p (TYPE_SIZE (type))
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  machine_mode mode1;
	  poly_int64 bitsize, bitpos, bytepos;
	  tree offset;
	  int unsignedp, reversep, volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
	      && !reversep
	      && known_size_p (bitsize)
	      && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bytepos);
		  else
		    op0 = adjust_address (op0, mode, bytepos);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode, false, NULL);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (modifier != EXPAND_WRITE
	      && modifier != EXPAND_MEMORY
	      && !inner_reference_p
	      && mode != BLKmode
	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  poly_uint64 mode_size = GET_MODE_SIZE (mode);
		  poly_uint64 op0_size
		    = tree_to_poly_uint64 (TYPE_SIZE_UNIT (inner_type));
		  poly_int64 temp_size = upper_bound (op0_size, mode_size);
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;

    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size 1 into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx_code_label *label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label,
		     profile_probability::uninitialized ());
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
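      /* Concretely, for s.a |= s.b with two one-bit bitfields, the code
	 above emits the equivalent of "if (s.b) s.a = 1;": a conditional
	 jump around a constant store, instead of reading, or-ing and
	 rewriting the destination bitfield.  */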
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONDITIONAL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
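/* For instance, reducing an SImode register to a signed 1-bit type
   shifts left by 31 and then arithmetic-shifts right by 31, collapsing
   the value to 0 or -1; in the unsigned case a single AND with the mask
   (1 << prec) - 1 is enough, as done above.  */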
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
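/* In other words, the pattern recognized above is roughly

     offset = -(intptr_t) &exp & (N - 1)

   for some power of two N larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT;
   adding such an offset to &exp rounds the address up to the next
   N-byte boundary.  */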
/* Return the tree node if ARG corresponds to a string constant; return
   zero if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return array;
    }

  return 0;
}
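/* For example, both &"hello"[2] and "hello" + 2 yield the STRING_CST
   for "hello" with *PTR_OFFSET set to 2, and a const char array
   initialized from a string literal resolves the same way through
   ctor_for_folding.  */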
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction, and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
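/* As an example of the single-bit path above, (x & 8) != 0 is folded
   by fold_single_bit_test into the equivalent of (x >> 3) & 1 and
   expanded as shifts, so no scc instruction is needed for it.  */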
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  struct expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
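/* Schematically, the sequence emitted above is

     if ((unsigned) index > range)  goto default_label;
     pc = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
     goto *pc;

   with the table address additionally passed through
   PIC_CASE_VECTOR_ADDRESS when generating PIC code.  */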
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	builder.quick_push (CONSTM1_RTX (inner));
      else
	gcc_unreachable ();
    }
  return builder.build ();
}
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;
  unsigned i;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
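/* E.g. for a four-element boolean vector { -1, 0, 0, -1 } the result
   in QImode is the scalar constant 0b1001: bit I of the mask mirrors
   element I of the vector.  */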
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
							  inner));
      else if (TREE_CODE (elt) == FIXED_CST)
	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							  inner));
      else
	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
						  inner));
    }
  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}