/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                     unsigned HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type, bool);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
                          profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a <rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
          >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
275 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
276 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
  if (to_real)
    {
      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       from, from_mode);
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */

  if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
    ctab = trunc_optab;
  else if (unsignedp)
    ctab = zext_optab;
  else
    ctab = sext_optab;

  if (convert_optab_handler (ctab, to_mode, from_mode)
      != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when maybe so.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          FOR_EACH_MODE_FROM (intermediate, from_mode)
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
          >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
                                   GET_MODE_PRECISION (int_mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), int_mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
    return gen_lowpart (int_mode, x);

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
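
/* Illustrative usage sketch (added for this edited excerpt, not part of the
   original file): a caller that holds an HImode value in REG and needs it
   zero-extended to SImode could write something like

     rtx wide = convert_to_mode (SImode, reg, 1);

   convert_to_mode simply forwards to convert_modes with VOIDmode as the old
   mode, so the source mode is taken from REG itself; passing 0 instead of 1
   would request sign extension.  */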
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        {
          tmode = mode_iter.require ();
          if (GET_MODE_SIZE (tmode) > max_pieces
              || SLOW_UNALIGNED_ACCESS (tmode, align))
            break;
          xmode = tmode;
        }

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
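
/* Worked example (added for this edited excerpt, not in the original file):
   with MAX_PIECES == 8 the routine picks the 8-byte integer mode, so an
   ALIGN of 256 bits is clipped to that mode's alignment (typically 64 bits).
   If ALIGN is smaller, say 16 bits, the loop instead walks the integer modes
   and keeps the widest one that both fits in MAX_PIECES and is not slow to
   access at that alignment, then returns at least that mode's alignment.  */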
/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
                  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
                                                 optimize_insn_for_speed_p ());
}

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
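
/* Usage sketch (added for this edited excerpt, not in the original file):
   callers typically guard a piecewise expansion with this predicate, e.g.

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   which is exactly the pattern emit_block_move_hints uses further down in
   this file.  */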
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        {
          unsigned HOST_WIDE_INT n_pieces = l / modesize;
          l %= modesize;
          switch (op)
            {
            default:
              n_insns += n_pieces;
              break;

            case COMPARE_BY_PIECES:
              int batch = targetm.compare_by_pieces_branch_ratio (mode);
              int batch_ops = 4 * batch - 1;
              unsigned HOST_WIDE_INT full = n_pieces / batch;
              n_insns += full * batch_ops;
              if (n_pieces % batch != 0)
                n_insns += batch_ops;
              break;
            }
        }
      max_size = modesize;
    }

  gcc_assert (!l);
  return n_insns;
}
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;

  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;

  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;

  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;

  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;

  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;

public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
                          void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (inc || dec)
        m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
         implementing the memory operation has never handled them.
         Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
        {
          m_auto = true;
          if (STACK_GROWS_DOWNWARD)
            m_addr_inc = -1;
          else
            m_addr_inc = 1;
        }
      else
        gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
                             HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
                     ? USE_LOAD_PRE_DECREMENT (mode)
                     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
                      ? USE_LOAD_POST_INCREMENT (mode)
                      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
                                 plus_constant (addr_mode,
                                                m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}
/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   to be negative).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overriden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
                  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
                                rtx from, bool from_load,
                                by_pieces_constfn from_cfn,
                                void *from_cfn_data,
                                unsigned HOST_WIDE_INT len,
                                unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
               from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   the GENERATE virtual function with the two operands and the mode.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
        {
          unsigned int size = GET_MODE_SIZE (mode);
          rtx to1 = NULL_RTX, from1;

          while (m_len >= size)
            {
              if (m_reverse)
                m_offset -= size;

              to1 = m_to.adjust (mode, m_offset);
              from1 = m_from.adjust (mode, m_offset);

              m_to.maybe_predec (-(HOST_WIDE_INT)size);
              m_from.maybe_predec (-(HOST_WIDE_INT)size);

              generate (to1, from1, mode);

              m_to.maybe_postinc (size);
              m_from.maybe_postinc (size);

              if (!m_reverse)
                m_offset += size;

              m_len -= size;
            }

          finish_mode (mode);
        }

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
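
/* Usage sketch (added for this edited excerpt, not in the original file): a
   derived class supplies prepare_mode and generate and then drives the loop,
   e.g.

     move_by_pieces_d data (to, from, len, align);
     data.run ();

   run () walks from the widest usable integer mode down to narrower ones,
   calling generate once per piece.  */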
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_endp (int);
};
/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a move_by_pieces operation.
   OP0 and OP1 are the destination and source pieces for one move in
   MODE.  If OP0 is NULL, generate a push of OP1 instead; otherwise
   emit the move using the insn gen function recorded by prepare_mode.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
move_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);
  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
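
/* Example call (added for this edited excerpt, not in the original file):
   copying a known 16-byte block with 4-byte (32-bit) alignment and no
   mempcpy-style return value could be done with

     move_by_pieces (dst_mem, src_mem, 16, 32, 0);

   where DST_MEM and SRC_MEM are BLKmode MEMs and the alignment is given in
   bits, as elsewhere in this file.  */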
/* Derived class from op_by_pieces_d, providing support for block store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
                     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_endp (int);
};
/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a store_by_pieces operation.
   OP0 is the destination piece and OP1 the constant value to store
   in MODE; emit the store using the insn gen function recorded by
   prepare_mode.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
store_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
                                               memsetp
                                               ? SET_BY_PIECES
                                               : STORE_BY_PIECES,
                                               optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          scalar_int_mode mode = widest_int_mode_for_size (max_size);

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
                (len, align,
                 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
                 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
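
/* Usage sketch (added for this edited excerpt, not in the original file):
   the CONSTFUN callback returns the constant to store for a given offset.
   A hypothetical memset-style caller might use

     static rtx
     all_zeros_1 (void *, HOST_WIDE_INT, scalar_int_mode)
     {
       return const0_rtx;
     }

     store_by_pieces (to, len, all_zeros_1, NULL, align, true, 0);

   clear_by_pieces below does essentially this with clear_by_pieces_1.  */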
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);

 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
                       void *op1_cfn_data, HOST_WIDE_INT len, int align,
                       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  Accumulate the results and emit a branch to the
   fail label once the current batch is full.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
                               true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
        temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
                             true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
        return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
                           m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}
/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
                             NULL_RTX, NULL, m_fail_label,
                             profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
                   rtx target, unsigned int align,
                   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
                            fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);

  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it can not be represented
   in unsigned HOST_WIDE_INT, then it is mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }
  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }
  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);

      retval = emit_block_copy_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
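
/* Example call (added for this edited excerpt, not in the original file): a
   typical expansion of a struct copy uses the BLKmode MEMs for source and
   destination plus an rtx length, e.g.

     emit_block_move (dst_mem, src_mem, GEN_INT (24), BLOCK_OP_NORMAL);

   which forwards to emit_block_move_hints with min/max size hints derived
   from the constant length.  */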
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }
  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label,
                           profile_probability::guessed_always ()
                             .apply_scale (9, 10));
}
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
                           rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
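
/* Usage sketch (added for this edited excerpt, not in the original file):
   the wrappers in this file call this helper with the builtin they need,
   e.g.

     rtx ret = emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size,
                                          false);

   builds and expands a call to the user-visible memcpy with the proper
   calling convention rather than the internal libcall one.  */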
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
                          rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
                          HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
                               TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
                           unsigned int align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   returning NULL_RTX.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
                      bool equality_only, by_pieces_constfn y_cfn,
                      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
                                y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
        {
          src = gen_reg_rtx (imode);
          emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
        }
      else
        {
          src = assign_stack_temp (GET_MODE (orig_src), ssize);
          emit_move_insn (src, orig_src);
        }
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
          unsigned int elt = bytepos / slen0;
          unsigned int subpos = bytepos % slen0;

          if (subpos + bytelen <= slen0)
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, elt);
              if (subpos != 0
                  || subpos + bytelen != slen0
                  || (!CONSTANT_P (tmps[i])
                      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             subpos * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false,
                                             NULL);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false,
                                           NULL);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false, NULL);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}

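/* Illustrative sketch (hypothetical helper, not part of this file): loading
   the pieces of an aggregate return value, described by a PARALLEL, out of
   a BLKmode stack slot.  */
#if 0
static void
load_return_group_sketch (rtx parallel_dst, rtx blk_mem, tree type)
{
  /* SSIZE is the aggregate's size in bytes, or -1 if unknown.  */
  int ssize = int_size_in_bytes (type);
  emit_group_load (parallel_dst, blk_mem, type, ssize);
}
#endif
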
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
        {
          dst = gen_reg_rtx (imode);
          emit_group_store (dst, src, type, ssize);
          dst = gen_lowpart (GET_MODE (orig_dst), dst);
        }
      else
        {
          dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
          emit_group_store (dst, src, type, ssize);
        }
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}

/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}

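/* Illustrative sketch (hypothetical, not part of this file): a value
   returned from expanding a call may be a PARALLEL when the ABI splits it
   across registers; maybe_emit_group_store gives back an ordinary register
   that later code can use directly.  */
#if 0
static rtx
flatten_call_result_sketch (rtx call_result, tree type)
{
  /* If CALL_RESULT is a PARALLEL, the pieces are stored into a fresh
     pseudo of TYPE_MODE (type); otherwise the value is returned as-is.  */
  return maybe_emit_group_store (call_result, type);
}
#endif
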
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  machine_mode mode = GET_MODE (srcreg);
  machine_mode tmode = GET_MODE (target);
  machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
        copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode,
                                          false, NULL),
                       false);
    }
}

/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          false, NULL),
                       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      mode = mode_iter.require ();
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}

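/* Illustrative sketch (hypothetical, not part of this file): using
   get_def_for_expr to check whether an operand was defined by a MULT_EXPR,
   the kind of test used when looking for multiply-add opportunities during
   expansion.  */
#if 0
static bool
operand_is_product_sketch (tree op)
{
  gimple *def = get_def_for_expr (op, MULT_EXPR);
  /* DEF is NULL unless OP is an SSA_NAME whose defining statement is an
     assignment with MULT_EXPR on its right-hand side.  */
  return def != NULL;
}
#endif
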
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
                                                 CLEAR_BY_PIECES,
                                                 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}

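/* Illustrative sketch (hypothetical, not part of this file): zeroing a
   freshly allocated BLKmode stack temporary of a given byte size with
   clear_storage, which picks between clear_by_pieces, a setmem pattern and
   a memset libcall.  */
#if 0
static rtx
make_zeroed_temp_sketch (HOST_WIDE_INT size)
{
  rtx mem = assign_stack_temp (BLKmode, size);
  clear_storage (mem, gen_int_mode (size, Pmode), BLOCK_OP_NORMAL);
  return mem;
}
#endif
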
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}

3026 /* Expand a setmem pattern; return true if successful. */
3029 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
3030 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
3031 unsigned HOST_WIDE_INT min_size
,
3032 unsigned HOST_WIDE_INT max_size
,
3033 unsigned HOST_WIDE_INT probable_max_size
)
3035 /* Try the most limited insn first, because there's no point
3036 including more than one in the machine description unless
3037 the more limited one has some advantage. */
3039 if (expected_align
< align
)
3040 expected_align
= align
;
3041 if (expected_size
!= -1)
3043 if ((unsigned HOST_WIDE_INT
)expected_size
> max_size
)
3044 expected_size
= max_size
;
3045 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
3046 expected_size
= min_size
;
3049 opt_scalar_int_mode mode_iter
;
3050 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_INT
)
3052 scalar_int_mode mode
= mode_iter
.require ();
3053 enum insn_code code
= direct_optab_handler (setmem_optab
, mode
);
3055 if (code
!= CODE_FOR_nothing
3056 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3057 here because if SIZE is less than the mode mask, as it is
3058 returned by the macro, it will definitely be less than the
3059 actual mode mask. Since SIZE is within the Pmode address
3060 space, we limit MODE to Pmode. */
3061 && ((CONST_INT_P (size
)
3062 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3063 <= (GET_MODE_MASK (mode
) >> 1)))
3064 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
3065 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
3067 struct expand_operand ops
[9];
3070 nops
= insn_data
[(int) code
].n_generator_args
;
3071 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
3073 create_fixed_operand (&ops
[0], object
);
3074 /* The check above guarantees that this size conversion is valid. */
3075 create_convert_operand_to (&ops
[1], size
, mode
, true);
3076 create_convert_operand_from (&ops
[2], val
, byte_mode
, true);
3077 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
3080 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
3081 create_integer_operand (&ops
[5], expected_size
);
3085 create_integer_operand (&ops
[6], min_size
);
3086 /* If we can not represent the maximal size,
3087 make parameter NULL. */
3088 if ((HOST_WIDE_INT
) max_size
!= -1)
3089 create_integer_operand (&ops
[7], max_size
);
3091 create_fixed_operand (&ops
[7], NULL
);
3095 /* If we can not represent the maximal size,
3096 make parameter NULL. */
3097 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
3098 create_integer_operand (&ops
[8], probable_max_size
);
3100 create_fixed_operand (&ops
[8], NULL
);
3102 if (maybe_expand_insn (code
, nops
, ops
))
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
                   false);
}

/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false, NULL);
}

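/* Illustrative sketch (hypothetical, not part of this file): building a
   complex value with its real and imaginary parts exchanged, using
   read_complex_part and write_complex_part.  */
#if 0
static rtx
swap_complex_parts_sketch (rtx cplx)
{
  machine_mode cmode = GET_MODE (cplx);
  rtx tmp = gen_reg_rtx (cmode);
  /* Real part of TMP gets the imaginary part of CPLX, and vice versa.  */
  write_complex_part (tmp, read_complex_part (cplx, true), false);
  write_complex_part (tmp, read_complex_part (cplx, false), true);
  return tmp;
}
#endif
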
3236 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3237 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3238 represented in NEW_MODE. If FORCE is true, this will never happen, as
3239 we'll force-create a SUBREG if needed. */
3242 emit_move_change_mode (machine_mode new_mode
,
3243 machine_mode old_mode
, rtx x
, bool force
)
3247 if (push_operand (x
, GET_MODE (x
)))
3249 ret
= gen_rtx_MEM (new_mode
, XEXP (x
, 0));
3250 MEM_COPY_ATTRIBUTES (ret
, x
);
3254 /* We don't have to worry about changing the address since the
3255 size in bytes is supposed to be the same. */
3256 if (reload_in_progress
)
3258 /* Copy the MEM to change the mode and move any
3259 substitutions from the old MEM to the new one. */
3260 ret
= adjust_address_nv (x
, new_mode
, 0);
3261 copy_replacements (x
, ret
);
3264 ret
= adjust_address (x
, new_mode
, 0);
3268 /* Note that we do want simplify_subreg's behavior of validating
3269 that the new mode is ok for a hard register. If we were to use
3270 simplify_gen_subreg, we would create the subreg, but would
3271 probably run into the target not being able to implement it. */
3272 /* Except, of course, when FORCE is true, when this is exactly what
3273 we want. Which is needed for CCmodes on some targets. */
3275 ret
= simplify_gen_subreg (new_mode
, x
, old_mode
, 0);
3277 ret
= simplify_subreg (new_mode
, x
, old_mode
, 0);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}

3311 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3312 Return an equivalent MEM that does not use an auto-increment. */
3315 emit_move_resolve_push (machine_mode mode
, rtx x
)
3317 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
3318 HOST_WIDE_INT adjust
;
3321 adjust
= GET_MODE_SIZE (mode
);
3322 #ifdef PUSH_ROUNDING
3323 adjust
= PUSH_ROUNDING (adjust
);
3325 if (code
== PRE_DEC
|| code
== POST_DEC
)
3327 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3329 rtx expr
= XEXP (XEXP (x
, 0), 1);
3332 gcc_assert (GET_CODE (expr
) == PLUS
|| GET_CODE (expr
) == MINUS
);
3333 gcc_assert (CONST_INT_P (XEXP (expr
, 1)));
3334 val
= INTVAL (XEXP (expr
, 1));
3335 if (GET_CODE (expr
) == MINUS
)
3337 gcc_assert (adjust
== val
|| adjust
== -val
);
3341 /* Do not use anti_adjust_stack, since we don't want to update
3342 stack_pointer_delta. */
3343 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3344 gen_int_mode (adjust
, Pmode
), stack_pointer_rtx
,
3345 0, OPTAB_LIB_WIDEN
);
3346 if (temp
!= stack_pointer_rtx
)
3347 emit_move_insn (stack_pointer_rtx
, temp
);
3354 temp
= stack_pointer_rtx
;
3359 temp
= plus_constant (Pmode
, stack_pointer_rtx
, -adjust
);
3365 return replace_equiv_address (x
, temp
);
3368 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3369 X is known to satisfy push_operand, and MODE is known to be complex.
3370 Returns the last instruction emitted. */
3373 emit_move_complex_push (machine_mode mode
, rtx x
, rtx y
)
3375 machine_mode submode
= GET_MODE_INNER (mode
);
3378 #ifdef PUSH_ROUNDING
3379 unsigned int submodesize
= GET_MODE_SIZE (submode
);
3381 /* In case we output to the stack, but the size is smaller than the
3382 machine can push exactly, we need to use move instructions. */
3383 if (PUSH_ROUNDING (submodesize
) != submodesize
)
3385 x
= emit_move_resolve_push (mode
, x
);
3386 return emit_move_insn (x
, y
);
3390 /* Note that the real part always precedes the imag part in memory
3391 regardless of machine's endianness. */
3392 switch (GET_CODE (XEXP (x
, 0)))
3406 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3407 read_complex_part (y
, imag_first
));
3408 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3409 read_complex_part (y
, !imag_first
));
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}

3431 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3432 MODE is known to be complex. Returns the last instruction emitted. */
3435 emit_move_complex (machine_mode mode
, rtx x
, rtx y
)
3439 /* Need to take special care for pushes, to maintain proper ordering
3440 of the data, and possibly extra padding. */
3441 if (push_operand (x
, mode
))
3442 return emit_move_complex_push (mode
, x
, y
);
3444 /* See if we can coerce the target into moving both values at once, except
3445 for floating point where we favor moving as parts if this is easy. */
3446 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
3447 && optab_handler (mov_optab
, GET_MODE_INNER (mode
)) != CODE_FOR_nothing
3449 && HARD_REGISTER_P (x
)
3450 && REG_NREGS (x
) == 1)
3452 && HARD_REGISTER_P (y
)
3453 && REG_NREGS (y
) == 1))
3455 /* Not possible if the values are inherently not adjacent. */
3456 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
3458 /* Is possible if both are registers (or subregs of registers). */
3459 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
3461 /* If one of the operands is a memory, and alignment constraints
3462 are friendly enough, we may be able to do combined memory operations.
3463 We do not attempt this if Y is a constant because that combination is
3464 usually better with the by-parts thing below. */
3465 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
3466 && (!STRICT_ALIGNMENT
3467 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
3476 /* For memory to memory moves, optimal behavior can be had with the
3477 existing block move logic. */
3478 if (MEM_P (x
) && MEM_P (y
))
3480 emit_block_move (x
, y
, GEN_INT (GET_MODE_SIZE (mode
)),
3481 BLOCK_OP_NO_LIBCALL
);
3482 return get_last_insn ();
3485 ret
= emit_move_via_integer (mode
, x
, y
, true);
3490 return emit_move_complex_parts (x
, y
);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}

3519 /* Return true if word I of OP lies entirely in the
3520 undefined bits of a paradoxical subreg. */
3523 undefined_operand_subword_p (const_rtx op
, int i
)
3525 machine_mode innermode
, innermostmode
;
3527 if (GET_CODE (op
) != SUBREG
)
3529 innermode
= GET_MODE (op
);
3530 innermostmode
= GET_MODE (SUBREG_REG (op
));
3531 offset
= i
* UNITS_PER_WORD
+ SUBREG_BYTE (op
);
3532 /* The SUBREG_BYTE represents offset, as if the value were stored in
3533 memory, except for a paradoxical subreg where we define
3534 SUBREG_BYTE to be 0; undo this exception as in
3536 if (SUBREG_BYTE (op
) == 0
3537 && GET_MODE_SIZE (innermostmode
) < GET_MODE_SIZE (innermode
))
3539 int difference
= (GET_MODE_SIZE (innermostmode
) - GET_MODE_SIZE (innermode
));
3540 if (WORDS_BIG_ENDIAN
)
3541 offset
+= (difference
/ UNITS_PER_WORD
) * UNITS_PER_WORD
;
3542 if (BYTES_BIG_ENDIAN
)
3543 offset
+= difference
% UNITS_PER_WORD
;
3545 if (offset
>= GET_MODE_SIZE (innermostmode
)
3546 || offset
<= -GET_MODE_SIZE (word_mode
))
3551 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3552 MODE is any multi-word or full-word mode that lacks a move_insn
3553 pattern. Note that you will get better code if you define such
3554 patterns, even if they must turn into multiple assembler instructions. */
3557 emit_move_multi_word (machine_mode mode
, rtx x
, rtx y
)
3559 rtx_insn
*last_insn
= 0;
3565 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3567 /* If X is a push on the stack, do the push now and replace
3568 X with a reference to the stack pointer. */
3569 if (push_operand (x
, mode
))
3570 x
= emit_move_resolve_push (mode
, x
);
3572 /* If we are in reload, see if either operand is a MEM whose address
3573 is scheduled for replacement. */
3574 if (reload_in_progress
&& MEM_P (x
)
3575 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3576 x
= replace_equiv_address_nv (x
, inner
);
3577 if (reload_in_progress
&& MEM_P (y
)
3578 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3579 y
= replace_equiv_address_nv (y
, inner
);
3583 need_clobber
= false;
3585 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3588 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3591 /* Do not generate code for a move if it would come entirely
3592 from the undefined bits of a paradoxical subreg. */
3593 if (undefined_operand_subword_p (y
, i
))
3596 ypart
= operand_subword (y
, i
, 1, mode
);
3598 /* If we can't get a part of Y, put Y into memory if it is a
3599 constant. Otherwise, force it into a register. Then we must
3600 be able to get a part of Y. */
3601 if (ypart
== 0 && CONSTANT_P (y
))
3603 y
= use_anchored_address (force_const_mem (mode
, y
));
3604 ypart
= operand_subword (y
, i
, 1, mode
);
3606 else if (ypart
== 0)
3607 ypart
= operand_subword_force (y
, i
, mode
);
3609 gcc_assert (xpart
&& ypart
);
3611 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3613 last_insn
= emit_move_insn (xpart
, ypart
);
3619 /* Show the output dies here. This is necessary for SUBREGs
3620 of pseudos since we cannot track their lifetimes correctly;
3621 hard regs shouldn't appear here except as return values.
3622 We never want to emit such a clobber after reload. */
3624 && ! (reload_in_progress
|| reload_completed
)
3625 && need_clobber
!= 0)
3633 /* Low level part of emit_move_insn.
3634 Called just like emit_move_insn, but assumes X and Y
3635 are basically valid. */
3638 emit_move_insn_1 (rtx x
, rtx y
)
3640 machine_mode mode
= GET_MODE (x
);
3641 enum insn_code code
;
3643 gcc_assert ((unsigned int) mode
< (unsigned int) MAX_MACHINE_MODE
);
3645 code
= optab_handler (mov_optab
, mode
);
3646 if (code
!= CODE_FOR_nothing
)
3647 return emit_insn (GEN_FCN (code
) (x
, y
));
3649 /* Expand complex moves by moving real part and imag part. */
3650 if (COMPLEX_MODE_P (mode
))
3651 return emit_move_complex (mode
, x
, y
);
3653 if (GET_MODE_CLASS (mode
) == MODE_DECIMAL_FLOAT
3654 || ALL_FIXED_POINT_MODE_P (mode
))
3656 rtx_insn
*result
= emit_move_via_integer (mode
, x
, y
, true);
3658 /* If we can't find an integer mode, use multi words. */
3662 return emit_move_multi_word (mode
, x
, y
);
3665 if (GET_MODE_CLASS (mode
) == MODE_CC
)
3666 return emit_move_ccmode (mode
, x
, y
);
3668 /* Try using a move pattern for the corresponding integer mode. This is
3669 only safe when simplify_subreg can convert MODE constants into integer
3670 constants. At present, it can only do this reliably if the value
3671 fits within a HOST_WIDE_INT. */
3672 if (!CONSTANT_P (y
) || GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3674 rtx_insn
*ret
= emit_move_via_integer (mode
, x
, y
, lra_in_progress
);
3678 if (! lra_in_progress
|| recog (PATTERN (ret
), ret
, 0) >= 0)
3683 return emit_move_multi_word (mode
, x
, y
);
3686 /* Generate code to copy Y into X.
3687 Both Y and X must have the same mode, except that
3688 Y can be a constant with VOIDmode.
3689 This mode cannot be BLKmode; use emit_block_move for that.
3691 Return the last instruction emitted. */
3694 emit_move_insn (rtx x
, rtx y
)
3696 machine_mode mode
= GET_MODE (x
);
3697 rtx y_cst
= NULL_RTX
;
3698 rtx_insn
*last_insn
;
3701 gcc_assert (mode
!= BLKmode
3702 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
3707 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3708 && (last_insn
= compress_float_constant (x
, y
)))
3713 if (!targetm
.legitimate_constant_p (mode
, y
))
3715 y
= force_const_mem (mode
, y
);
3717 /* If the target's cannot_force_const_mem prevented the spill,
3718 assume that the target's move expanders will also take care
3719 of the non-legitimate constant. */
3723 y
= use_anchored_address (y
);
3727 /* If X or Y are memory references, verify that their addresses are valid
3730 && (! memory_address_addr_space_p (GET_MODE (x
), XEXP (x
, 0),
3732 && ! push_operand (x
, GET_MODE (x
))))
3733 x
= validize_mem (x
);
3736 && ! memory_address_addr_space_p (GET_MODE (y
), XEXP (y
, 0),
3737 MEM_ADDR_SPACE (y
)))
3738 y
= validize_mem (y
);
3740 gcc_assert (mode
!= BLKmode
);
3742 last_insn
= emit_move_insn_1 (x
, y
);
3744 if (y_cst
&& REG_P (x
)
3745 && (set
= single_set (last_insn
)) != NULL_RTX
3746 && SET_DEST (set
) == x
3747 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
3748 set_unique_reg_note (last_insn
, REG_EQUAL
, copy_rtx (y_cst
));
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

3768 /* If Y is representable exactly in a narrower mode, and the target can
3769 perform the extension directly from constant or memory, then emit the
3770 move as an extension. */
3773 compress_float_constant (rtx x
, rtx y
)
3775 machine_mode dstmode
= GET_MODE (x
);
3776 machine_mode orig_srcmode
= GET_MODE (y
);
3777 machine_mode srcmode
;
3778 const REAL_VALUE_TYPE
*r
;
3779 int oldcost
, newcost
;
3780 bool speed
= optimize_insn_for_speed_p ();
3782 r
= CONST_DOUBLE_REAL_VALUE (y
);
3784 if (targetm
.legitimate_constant_p (dstmode
, y
))
3785 oldcost
= set_src_cost (y
, orig_srcmode
, speed
);
3787 oldcost
= set_src_cost (force_const_mem (dstmode
, y
), dstmode
, speed
);
3789 FOR_EACH_MODE_UNTIL (srcmode
, orig_srcmode
)
3793 rtx_insn
*last_insn
;
3795 /* Skip if the target can't extend this way. */
3796 ic
= can_extend_p (dstmode
, srcmode
, 0);
3797 if (ic
== CODE_FOR_nothing
)
3800 /* Skip if the narrowed value isn't exact. */
3801 if (! exact_real_truncate (srcmode
, r
))
3804 trunc_y
= const_double_from_real_value (*r
, srcmode
);
3806 if (targetm
.legitimate_constant_p (srcmode
, trunc_y
))
3808 /* Skip if the target needs extra instructions to perform
3810 if (!insn_operand_matches (ic
, 1, trunc_y
))
3812 /* This is valid, but may not be cheaper than the original. */
3813 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3815 if (oldcost
< newcost
)
3818 else if (float_extend_from_mem
[dstmode
][srcmode
])
3820 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3821 /* This is valid, but may not be cheaper than the original. */
3822 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3824 if (oldcost
< newcost
)
3826 trunc_y
= validize_mem (trunc_y
);
3831 /* For CSE's benefit, force the compressed constant pool entry
3832 into a new pseudo. This constant may be used in different modes,
3833 and if not, combine will put things back together for us. */
3834 trunc_y
= force_reg (srcmode
, trunc_y
);
3836 /* If x is a hard register, perform the extension into a pseudo,
3837 so that e.g. stack realignment code is aware of it. */
3839 if (REG_P (x
) && HARD_REGISTER_P (x
))
3840 target
= gen_reg_rtx (dstmode
);
3842 emit_unop_insn (ic
, target
, trunc_y
, UNKNOWN
);
3843 last_insn
= get_last_insn ();
3846 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3849 return emit_move_insn (x
, target
);
3856 /* Pushing data onto the stack. */
3858 /* Push a block of length SIZE (perhaps variable)
3859 and return an rtx to address the beginning of the block.
3860 The value may be virtual_outgoing_args_rtx.
3862 EXTRA is the number of bytes of padding to push in addition to SIZE.
3863 BELOW nonzero means this padding comes at low addresses;
3864 otherwise, the padding comes at high addresses. */
3867 push_block (rtx size
, int extra
, int below
)
3871 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3872 if (CONSTANT_P (size
))
3873 anti_adjust_stack (plus_constant (Pmode
, size
, extra
));
3874 else if (REG_P (size
) && extra
== 0)
3875 anti_adjust_stack (size
);
3878 temp
= copy_to_mode_reg (Pmode
, size
);
3880 temp
= expand_binop (Pmode
, add_optab
, temp
,
3881 gen_int_mode (extra
, Pmode
),
3882 temp
, 0, OPTAB_LIB_WIDEN
);
3883 anti_adjust_stack (temp
);
3886 if (STACK_GROWS_DOWNWARD
)
3888 temp
= virtual_outgoing_args_rtx
;
3889 if (extra
!= 0 && below
)
3890 temp
= plus_constant (Pmode
, temp
, extra
);
3894 if (CONST_INT_P (size
))
3895 temp
= plus_constant (Pmode
, virtual_outgoing_args_rtx
,
3896 -INTVAL (size
) - (below
? 0 : extra
));
3897 else if (extra
!= 0 && !below
)
3898 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3899 negate_rtx (Pmode
, plus_constant (Pmode
, size
,
3902 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3903 negate_rtx (Pmode
, size
));
3906 return memory_address (NARROWEST_INT_MODE
, temp
);
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}

3923 /* A utility routine used here, in reload, and in try_split. The insns
3924 after PREV up to and including LAST are known to adjust the stack,
3925 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3926 placing notes as appropriate. PREV may be NULL, indicating the
3927 entire insn sequence prior to LAST should be scanned.
3929 The set of allowed stack pointer modifications is small:
3930 (1) One or more auto-inc style memory references (aka pushes),
3931 (2) One or more addition/subtraction with the SP as destination,
3932 (3) A single move insn with the SP as destination,
3933 (4) A call_pop insn,
3934 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3936 Insns in the sequence that do not modify the SP are ignored,
3937 except for noreturn calls.
3939 The return value is the amount of adjustment that can be trivially
3940 verified, via immediate operand or auto-inc. If the adjustment
3941 cannot be trivially extracted, the return value is INT_MIN. */
3944 find_args_size_adjust (rtx_insn
*insn
)
3949 pat
= PATTERN (insn
);
3952 /* Look for a call_pop pattern. */
3955 /* We have to allow non-call_pop patterns for the case
3956 of emit_single_push_insn of a TLS address. */
3957 if (GET_CODE (pat
) != PARALLEL
)
3960 /* All call_pop have a stack pointer adjust in the parallel.
3961 The call itself is always first, and the stack adjust is
3962 usually last, so search from the end. */
3963 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; --i
)
3965 set
= XVECEXP (pat
, 0, i
);
3966 if (GET_CODE (set
) != SET
)
3968 dest
= SET_DEST (set
);
3969 if (dest
== stack_pointer_rtx
)
3972 /* We'd better have found the stack pointer adjust. */
3975 /* Fall through to process the extracted SET and DEST
3976 as if it was a standalone insn. */
3978 else if (GET_CODE (pat
) == SET
)
3980 else if ((set
= single_set (insn
)) != NULL
)
3982 else if (GET_CODE (pat
) == PARALLEL
)
3984 /* ??? Some older ports use a parallel with a stack adjust
3985 and a store for a PUSH_ROUNDING pattern, rather than a
3986 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3987 /* ??? See h8300 and m68k, pushqi1. */
3988 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; --i
)
3990 set
= XVECEXP (pat
, 0, i
);
3991 if (GET_CODE (set
) != SET
)
3993 dest
= SET_DEST (set
);
3994 if (dest
== stack_pointer_rtx
)
3997 /* We do not expect an auto-inc of the sp in the parallel. */
3998 gcc_checking_assert (mem_autoinc_base (dest
) != stack_pointer_rtx
);
3999 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
4000 != stack_pointer_rtx
);
4008 dest
= SET_DEST (set
);
4010 /* Look for direct modifications of the stack pointer. */
4011 if (REG_P (dest
) && REGNO (dest
) == STACK_POINTER_REGNUM
)
4013 /* Look for a trivial adjustment, otherwise assume nothing. */
4014 /* Note that the SPU restore_stack_block pattern refers to
4015 the stack pointer in V4SImode. Consider that non-trivial. */
4016 if (SCALAR_INT_MODE_P (GET_MODE (dest
))
4017 && GET_CODE (SET_SRC (set
)) == PLUS
4018 && XEXP (SET_SRC (set
), 0) == stack_pointer_rtx
4019 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
4020 return INTVAL (XEXP (SET_SRC (set
), 1));
4021 /* ??? Reload can generate no-op moves, which will be cleaned
4022 up later. Recognize it and continue searching. */
4023 else if (rtx_equal_p (dest
, SET_SRC (set
)))
4026 return HOST_WIDE_INT_MIN
;
4032 /* Otherwise only think about autoinc patterns. */
4033 if (mem_autoinc_base (dest
) == stack_pointer_rtx
)
4036 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
4037 != stack_pointer_rtx
);
4039 else if (mem_autoinc_base (SET_SRC (set
)) == stack_pointer_rtx
)
4040 mem
= SET_SRC (set
);
4044 addr
= XEXP (mem
, 0);
4045 switch (GET_CODE (addr
))
4049 return GET_MODE_SIZE (GET_MODE (mem
));
4052 return -GET_MODE_SIZE (GET_MODE (mem
));
4055 addr
= XEXP (addr
, 1);
4056 gcc_assert (GET_CODE (addr
) == PLUS
);
4057 gcc_assert (XEXP (addr
, 0) == stack_pointer_rtx
);
4058 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
4059 return INTVAL (XEXP (addr
, 1));
int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
      if (STACK_GROWS_DOWNWARD)
	this_delta = -(unsigned HOST_WIDE_INT) this_delta;

      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
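/* Worked example (illustrative, not from the original source): with
   STACK_GROWS_DOWNWARD and end_args_size == 16 after two 8-byte pushes,
   walking backwards annotates the second push with REG_ARGS_SIZE 16 and the
   first with REG_ARGS_SIZE 8; each note records the argument-area size in
   effect after its insn, and the per-insn delta of -8 reported by
   find_args_size_adjust is negated before being subtracted.  */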
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;

      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
						Pmode));
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (rounded_size, Pmode));

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
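/* Worked example (illustrative only): pushing a QImode value when
   PUSH_ROUNDING rounds 1 byte up to 4 and the argument is padded downward.
   padding_size = 4 - 1 = 3, so the stack pointer is first moved by the full
   4 bytes and the byte is then stored at stack_pointer + 3, leaving the
   3 padding bytes below the data within the slot.  */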
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif
/* If reading SIZE bytes from X will end up reading from
   Y, return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */
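/* Worked example (illustrative): with X = (plus sp 8), Y = (plus sp 12) and
   SIZE = 8, tmp is sp + 16 and sub simplifies to (const_int 4); 4 lies in
   [1, 8], so 4 bytes of the load from X overlap the region starting at Y.
   If X and Y had different base registers, sub would not be a CONST_INT and
   the function would return -2 ("can't determine").  */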
static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
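/* Illustration (hypothetical numbers): with UNITS_PER_WORD == 4 and
   PARTIAL == 8, the first nregs = 8 / 4 = 2 words of X go into registers
   starting at REG and only the remainder of X is pushed; the stack space
   consumed shrinks by those 8 bytes unless REG_PARM_STACK_SPACE requires the
   space to be allocated anyway, in which case copying merely skips it.  */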
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  /* If part should go in registers and pushing to that part would
	     overwrite some of the values that need to go into regs, load the
	     overlapping values into temporary pseudos to be moved into the hard
	     regs at the end after the stack pushing has completed.
	     We cannot load them directly into the hard regs here because
	     they can be clobbered by the block move expansions.  */

	  if (partial > 0 && reg != 0 && mode == BLKmode
	      && GET_CODE (reg) != PARALLEL)
	    {
	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
	      if (overlapping > 0)
		{
		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
		  overlapping /= UNITS_PER_WORD;

		  tmp_regs = XALLOCAVEC (rtx, overlapping);

		  for (int i = 0; i < overlapping; i++)
		    tmp_regs[i] = gen_reg_rtx (word_mode);

		  for (int i = 0; i < overlapping; i++)
		    emit_move_insn (tmp_regs[i],
				    operand_subword_force (target, i, mode));
		}
	      else if (overlapping == -1)
		overlapping = 0;
	      /* Could not determine whether there is overlap.
		 Fail the sibcall.  */
	      else
		{
		  overlapping = 0;
		  if (sibcall_p)
		    return false;
		}
	    }
	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  if (!emit_push_insn (operand_subword_force (x, i, mode),
			       word_mode, NULL_TREE, NULL_RTX, align, 0,
			       NULL_RTX, 0, args_addr,
			       GEN_INT (args_offset + ((i - not_stack + skip)
						       * UNITS_PER_WORD)),
			       reg_parm_stack_space, alignment_pad, sibcall_p))
	    return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
							args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

	  for (int i = 0; i < overlapping; i++)
	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
					 + nregs - overlapping + i),
			    tmp_regs[i]);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
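/* Example (illustrative): for "s.f |= 4" on a bitfield whose container fits
   in one word, the helper below expands the OR directly on the containing
   word: the constant is masked to the field width, shifted left by the
   field's bit position, and combined with the word via ior_optab, avoiding
   an extract/insert (extv/insv) round trip.  */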
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_bitsize = BITS_PER_WORD;

      scalar_int_mode best_mode;
      if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
	return false;
      str_mode = best_mode;
      str_bitsize = GET_MODE_BITSIZE (best_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  gcc_assert (!reverse);

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      else
	binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
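/* Illustration (hypothetical layout, not from the original source): for
   struct S { int a : 3; int b : 5; char c; }, the fields a and b typically
   share one DECL_BIT_FIELD_REPRESENTATIVE covering bits 0..7, so a store to
   either field may touch that whole byte but must not touch c.  For a
   COMPONENT_REF of b this function would then return *BITSTART == 0 and
   *BITEND == 7 (relative to the adjusted position).  */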
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend, tree exp,
	       HOST_WIDE_INT *bitpos, tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);

      *bitpos += adjust;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust / BITS_PER_UNIT);
      else
	*offset
	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
    }

  *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
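/* Illustration (assumed scenario, not from the original source): a local
   "int x" that is never address-taken may get a pseudo register as its
   DECL_RTL; a gimple store through "MEM[(int *)&x]" then refers to a
   non-memory object, and expand_assignment below must treat it as a
   (partial) register store rather than as a memory access.  */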
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;
4927 /* Don't crash if the lhs of the assignment was erroneous. */
4928 if (TREE_CODE (to
) == ERROR_MARK
)
4930 expand_normal (from
);
4934 /* Optimize away no-op moves without side-effects. */
4935 if (operand_equal_p (to
, from
, 0))
4938 /* Handle misaligned stores. */
4939 mode
= TYPE_MODE (TREE_TYPE (to
));
4940 if ((TREE_CODE (to
) == MEM_REF
4941 || TREE_CODE (to
) == TARGET_MEM_REF
)
4943 && !mem_ref_refers_to_non_mem_p (to
)
4944 && ((align
= get_object_alignment (to
))
4945 < GET_MODE_ALIGNMENT (mode
))
4946 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4947 != CODE_FOR_nothing
)
4948 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4952 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4953 reg
= force_not_mem (reg
);
4954 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4955 if (TREE_CODE (to
) == MEM_REF
&& REF_REVERSE_STORAGE_ORDER (to
))
4956 reg
= flip_storage_order (mode
, reg
);
4958 if (icode
!= CODE_FOR_nothing
)
4960 struct expand_operand ops
[2];
4962 create_fixed_operand (&ops
[0], mem
);
4963 create_input_operand (&ops
[1], reg
, mode
);
4964 /* The movmisalign<mode> pattern cannot fail, else the assignment
4965 would silently be omitted. */
4966 expand_insn (icode
, 2, ops
);
4969 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
,
4974 /* Assignment of a structure component needs special treatment
4975 if the structure component's rtx is not simply a MEM.
4976 Assignment of an array element at a constant index, and assignment of
4977 an array element in an unaligned packed structure field, has the same
4978 problem. Same for (partially) storing into a non-memory object. */
4979 if (handled_component_p (to
)
4980 || (TREE_CODE (to
) == MEM_REF
4981 && (REF_REVERSE_STORAGE_ORDER (to
)
4982 || mem_ref_refers_to_non_mem_p (to
)))
4983 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4986 HOST_WIDE_INT bitsize
, bitpos
;
4987 unsigned HOST_WIDE_INT bitregion_start
= 0;
4988 unsigned HOST_WIDE_INT bitregion_end
= 0;
4990 int unsignedp
, reversep
, volatilep
= 0;
4994 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4995 &unsignedp
, &reversep
, &volatilep
);
4997 /* Make sure bitpos is not negative, it can wreak havoc later. */
5000 gcc_assert (offset
== NULL_TREE
);
5001 offset
= size_int (bitpos
>> LOG2_BITS_PER_UNIT
);
5002 bitpos
&= BITS_PER_UNIT
- 1;
5005 if (TREE_CODE (to
) == COMPONENT_REF
5006 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
5007 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
5008 /* The C++ memory model naturally applies to byte-aligned fields.
5009 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5010 BITSIZE are not byte-aligned, there is no need to limit the range
5011 we can access. This can occur with packed structures in Ada. */
5012 else if (bitsize
> 0
5013 && bitsize
% BITS_PER_UNIT
== 0
5014 && bitpos
% BITS_PER_UNIT
== 0)
5016 bitregion_start
= bitpos
;
5017 bitregion_end
= bitpos
+ bitsize
- 1;
5020 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5022 /* If the field has a mode, we want to access it in the
5023 field's mode, not the computed mode.
5024 If a MEM has VOIDmode (external with incomplete type),
5025 use BLKmode for it instead. */
5028 if (mode1
!= VOIDmode
)
5029 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
5030 else if (GET_MODE (to_rtx
) == VOIDmode
)
5031 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
5036 machine_mode address_mode
;
5039 if (!MEM_P (to_rtx
))
5041 /* We can get constant negative offsets into arrays with broken
5042 user code. Translate this to a trap instead of ICEing. */
5043 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
5044 expand_builtin_trap ();
5045 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
5048 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5049 address_mode
= get_address_mode (to_rtx
);
5050 if (GET_MODE (offset_rtx
) != address_mode
)
5052 /* We cannot be sure that the RTL in offset_rtx is valid outside
5053 of a memory address context, so force it into a register
5054 before attempting to convert it to the desired mode. */
5055 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
5056 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5059 /* If we have an expression in OFFSET_RTX and a non-zero
5060 byte offset in BITPOS, adding the byte offset before the
5061 OFFSET_RTX results in better intermediate code, which makes
5062 later rtl optimization passes perform better.
5064 We prefer intermediate code like this:
5066 r124:DI=r123:DI+0x18
5071 r124:DI=r123:DI+0x10
5072 [r124:DI+0x8]=r121:DI
5074 This is only done for aligned data values, as these can
5075 be expected to result in single move instructions. */
5076 if (mode1
!= VOIDmode
5079 && (bitpos
% bitsize
) == 0
5080 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
5081 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
5083 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
5084 bitregion_start
= 0;
5085 if (bitregion_end
>= (unsigned HOST_WIDE_INT
) bitpos
)
5086 bitregion_end
-= bitpos
;
5090 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5091 highest_pow2_factor_for_target (to
,
5095 /* No action is needed if the target is not a memory and the field
5096 lies completely outside that target. This can occur if the source
5097 code contains an out-of-bounds access to a small array. */
5099 && GET_MODE (to_rtx
) != BLKmode
5100 && (unsigned HOST_WIDE_INT
) bitpos
5101 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
5103 expand_normal (from
);
5106 /* Handle expand_expr of a complex value returning a CONCAT. */
5107 else if (GET_CODE (to_rtx
) == CONCAT
)
5109 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
5110 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
5112 && bitsize
== mode_bitsize
)
5113 result
= store_expr (from
, to_rtx
, false, nontemporal
, reversep
);
5114 else if (bitsize
== mode_bitsize
/ 2
5115 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
5116 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
5117 nontemporal
, reversep
);
5118 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
5119 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
5120 bitregion_start
, bitregion_end
,
5121 mode1
, from
, get_alias_set (to
),
5122 nontemporal
, reversep
);
5123 else if (bitpos
>= mode_bitsize
/ 2)
5124 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
5125 bitpos
- mode_bitsize
/ 2,
5126 bitregion_start
, bitregion_end
,
5127 mode1
, from
, get_alias_set (to
),
5128 nontemporal
, reversep
);
5129 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
5132 result
= expand_normal (from
);
5133 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
5134 TYPE_MODE (TREE_TYPE (from
)), 0);
5135 emit_move_insn (XEXP (to_rtx
, 0),
5136 read_complex_part (from_rtx
, false));
5137 emit_move_insn (XEXP (to_rtx
, 1),
5138 read_complex_part (from_rtx
, true));
5142 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
5143 GET_MODE_SIZE (GET_MODE (to_rtx
)));
5144 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
5145 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
5146 result
= store_field (temp
, bitsize
, bitpos
,
5147 bitregion_start
, bitregion_end
,
5148 mode1
, from
, get_alias_set (to
),
5149 nontemporal
, reversep
);
5150 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
5151 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
5158 /* If the field is at offset zero, we could have been given the
5159 DECL_RTX of the parent struct. Don't munge it. */
5160 to_rtx
= shallow_copy_rtx (to_rtx
);
5161 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
5163 MEM_VOLATILE_P (to_rtx
) = 1;
5166 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
5167 bitregion_start
, bitregion_end
,
5168 mode1
, to_rtx
, to
, from
,
5172 result
= store_field (to_rtx
, bitsize
, bitpos
,
5173 bitregion_start
, bitregion_end
,
5174 mode1
, from
, get_alias_set (to
),
5175 nontemporal
, reversep
);
5179 preserve_temp_slots (result
);
5184 /* If the rhs is a function call and its value is not an aggregate,
5185 call the function before we start to compute the lhs.
5186 This is needed for correct code for cases such as
5187 val = setjmp (buf) on machines where reference to val
5188 requires loading up part of an address in a separate insn.
5190 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5191 since it might be a promoted variable where the zero- or sign- extension
5192 needs to be done. Handling this in the normal way is safe because no
5193 computation is done before the call. The same is true for SSA names. */
5194 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5195 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5196 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5198 || TREE_CODE (to
) == PARM_DECL
5199 || TREE_CODE (to
) == RESULT_DECL
)
5200 && REG_P (DECL_RTL (to
)))
5201 || TREE_CODE (to
) == SSA_NAME
))
5207 value
= expand_normal (from
);
5209 /* Split value and bounds to store them separately. */
5210 chkp_split_slot (value
, &value
, &bounds
);
5213 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5215 /* Handle calls that return values in multiple non-contiguous locations.
5216 The Irix 6 ABI has examples of this. */
5217 if (GET_CODE (to_rtx
) == PARALLEL
)
5219 if (GET_CODE (value
) == PARALLEL
)
5220 emit_group_move (to_rtx
, value
);
5222 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5223 int_size_in_bytes (TREE_TYPE (from
)));
5225 else if (GET_CODE (value
) == PARALLEL
)
5226 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5227 int_size_in_bytes (TREE_TYPE (from
)));
5228 else if (GET_MODE (to_rtx
) == BLKmode
)
5230 /* Handle calls that return BLKmode values in registers. */
5232 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5234 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5238 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5239 value
= convert_memory_address_addr_space
5240 (as_a
<scalar_int_mode
> (GET_MODE (to_rtx
)), value
,
5241 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5243 emit_move_insn (to_rtx
, value
);
5246 /* Store bounds if required. */
5248 && (BOUNDED_P (to
) || chkp_type_has_pointer (TREE_TYPE (to
))))
5250 gcc_assert (MEM_P (to_rtx
));
5251 chkp_emit_bounds_store (bounds
, value
, to_rtx
);
5254 preserve_temp_slots (to_rtx
);
5259 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5260 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5262 /* Don't move directly into a return register. */
5263 if (TREE_CODE (to
) == RESULT_DECL
5264 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5270 /* If the source is itself a return value, it still is in a pseudo at
5271 this point so we can move it back to the return register directly. */
5273 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5274 && TREE_CODE (from
) != CALL_EXPR
)
5275 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5277 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5279 /* Handle calls that return values in multiple non-contiguous locations.
5280 The Irix 6 ABI has examples of this. */
5281 if (GET_CODE (to_rtx
) == PARALLEL
)
5283 if (GET_CODE (temp
) == PARALLEL
)
5284 emit_group_move (to_rtx
, temp
);
5286 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5287 int_size_in_bytes (TREE_TYPE (from
)));
5290 emit_move_insn (to_rtx
, temp
);
5292 preserve_temp_slots (to_rtx
);
5297 /* In case we are returning the contents of an object which overlaps
5298 the place the value is being stored, use a safe function when copying
5299 a value through a pointer into a structure value return block. */
5300 if (TREE_CODE (to
) == RESULT_DECL
5301 && TREE_CODE (from
) == INDIRECT_REF
5302 && ADDR_SPACE_GENERIC_P
5303 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5304 && refs_may_alias_p (to
, from
)
5305 && cfun
->returns_struct
5306 && !cfun
->returns_pcc_struct
)
5311 size
= expr_size (from
);
5312 from_rtx
= expand_normal (from
);
5314 emit_block_move_via_libcall (XEXP (to_rtx
, 0), XEXP (from_rtx
, 0), size
);
5316 preserve_temp_slots (to_rtx
);
5321 /* Compute FROM and store the value in the rtx we got. */
5324 result
= store_expr_with_bounds (from
, to_rtx
, 0, nontemporal
, false, to
);
5325 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.

   If BTARGET is not NULL then computed bounds of EXP are
   associated with BTARGET.  */

static rtx
store_expr_with_bounds (tree exp, rtx target, int call_param_p,
			bool nontemporal, bool reverse, tree btarget)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();
5376 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5378 /* C++ can generate ?: expressions with a throw expression in one
5379 branch and an rvalue in the other. Here, we resolve attempts to
5380 store the throw expression's nonexistent result. */
5381 gcc_assert (!call_param_p
);
5382 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5385 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5387 /* Perform first part of compound expression, then assign from second
5389 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5390 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5391 return store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
,
5392 call_param_p
, nontemporal
, reverse
,
5395 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5397 /* For conditional expression, get safe form of the target. Then
5398 test the condition, doing the appropriate assignment on either
5399 side. This avoids the creation of unnecessary temporaries.
5400 For non-BLKmode, it is more efficient not to do this. */
5402 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5404 do_pending_stack_adjust ();
5406 jumpifnot (TREE_OPERAND (exp
, 0), lab1
,
5407 profile_probability::uninitialized ());
5408 store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5409 nontemporal
, reverse
, btarget
);
5410 emit_jump_insn (targetm
.gen_jump (lab2
));
5413 store_expr_with_bounds (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5414 nontemporal
, reverse
, btarget
);
5420 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5421 /* If this is a scalar in a register that is stored in a wider mode
5422 than the declared mode, compute the result into its declared mode
5423 and then convert to the wider mode. Our value is the computed
5426 rtx inner_target
= 0;
5427 scalar_int_mode outer_mode
= subreg_unpromoted_mode (target
);
5428 scalar_int_mode inner_mode
= subreg_promoted_mode (target
);
5430 /* We can do the conversion inside EXP, which will often result
5431 in some optimizations. Do the conversion in two steps: first
5432 change the signedness, if needed, then the extend. But don't
5433 do this if the type of EXP is a subtype of something else
5434 since then the conversion might involve more than just
5435 converting modes. */
5436 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5437 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5438 && GET_MODE_PRECISION (outer_mode
)
5439 == TYPE_PRECISION (TREE_TYPE (exp
)))
5441 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5442 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5444 /* Some types, e.g. Fortran's logical*4, won't have a signed
5445 version, so use the mode instead. */
5447 = (signed_or_unsigned_type_for
5448 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5450 ntype
= lang_hooks
.types
.type_for_mode
5451 (TYPE_MODE (TREE_TYPE (exp
)),
5452 SUBREG_PROMOTED_SIGN (target
));
5454 exp
= fold_convert_loc (loc
, ntype
, exp
);
5457 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5458 (inner_mode
, SUBREG_PROMOTED_SIGN (target
)),
5461 inner_target
= SUBREG_REG (target
);
5464 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5465 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5467 /* Handle bounds returned by call. */
5468 if (TREE_CODE (exp
) == CALL_EXPR
)
5471 chkp_split_slot (temp
, &temp
, &bounds
);
5472 if (bounds
&& btarget
)
5474 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5475 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5476 chkp_set_rtl_bounds (btarget
, tmp
);
5480 /* If TEMP is a VOIDmode constant, use convert_modes to make
5481 sure that we properly convert it. */
5482 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5484 temp
= convert_modes (outer_mode
, TYPE_MODE (TREE_TYPE (exp
)),
5485 temp
, SUBREG_PROMOTED_SIGN (target
));
5486 temp
= convert_modes (inner_mode
, outer_mode
, temp
,
5487 SUBREG_PROMOTED_SIGN (target
));
5490 convert_move (SUBREG_REG (target
), temp
,
5491 SUBREG_PROMOTED_SIGN (target
));
5495 else if ((TREE_CODE (exp
) == STRING_CST
5496 || (TREE_CODE (exp
) == MEM_REF
5497 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5498 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5500 && integer_zerop (TREE_OPERAND (exp
, 1))))
5501 && !nontemporal
&& !call_param_p
5504 /* Optimize initialization of an array with a STRING_CST. */
5505 HOST_WIDE_INT exp_len
, str_copy_len
;
5507 tree str
= TREE_CODE (exp
) == STRING_CST
5508 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5510 exp_len
= int_expr_size (exp
);
5514 if (TREE_STRING_LENGTH (str
) <= 0)
5517 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5518 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5521 str_copy_len
= TREE_STRING_LENGTH (str
);
5522 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5523 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5525 str_copy_len
+= STORE_MAX_PIECES
- 1;
5526 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5528 str_copy_len
= MIN (str_copy_len
, exp_len
);
5529 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5530 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5531 MEM_ALIGN (target
), false))
5536 dest_mem
= store_by_pieces (dest_mem
,
5537 str_copy_len
, builtin_strncpy_read_str
,
5539 TREE_STRING_POINTER (str
)),
5540 MEM_ALIGN (target
), false,
5541 exp_len
> str_copy_len
? 1 : 0);
5542 if (exp_len
> str_copy_len
)
5543 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5544 GEN_INT (exp_len
- str_copy_len
),
5553 /* If we want to use a nontemporal or a reverse order store, force the
5554 value into a register first. */
5555 tmp_target
= nontemporal
|| reverse
? NULL_RTX
: target
;
5556 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5558 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5561 /* Handle bounds returned by call. */
5562 if (TREE_CODE (exp
) == CALL_EXPR
)
5565 chkp_split_slot (temp
, &temp
, &bounds
);
5566 if (bounds
&& btarget
)
5568 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5569 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5570 chkp_set_rtl_bounds (btarget
, tmp
);
5575 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5576 the same as that of TARGET, adjust the constant. This is needed, for
5577 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5578 only a word-sized value. */
5579 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5580 && TREE_CODE (exp
) != ERROR_MARK
5581 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5582 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5583 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5585 /* If value was not generated in the target, store it there.
5586 Convert the value to TARGET's type first if necessary and emit the
5587 pending incrementations that have been queued when expanding EXP.
5588 Note that we cannot emit the whole queue blindly because this will
5589 effectively disable the POST_INC optimization later.
5591 If TEMP and TARGET compare equal according to rtx_equal_p, but
5592 one or both of them are volatile memory refs, we have to distinguish
5594 - expand_expr has used TARGET. In this case, we must not generate
5595 another copy. This can be detected by TARGET being equal according
5597 - expand_expr has not used TARGET - that means that the source just
5598 happens to have the same RTX form. Since temp will have been created
5599 by expand_expr, it will compare unequal according to == .
5600 We must generate a copy in this case, to reach the correct number
5601 of volatile memory references. */
5603 if ((! rtx_equal_p (temp
, target
)
5604 || (temp
!= target
&& (side_effects_p (temp
)
5605 || side_effects_p (target
))))
5606 && TREE_CODE (exp
) != ERROR_MARK
5607 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5608 but TARGET is not valid memory reference, TEMP will differ
5609 from TARGET although it is really the same location. */
5611 && rtx_equal_p (alt_rtl
, target
)
5612 && !side_effects_p (alt_rtl
)
5613 && !side_effects_p (target
))
5614 /* If there's nothing to copy, don't bother. Don't call
5615 expr_size unless necessary, because some front-ends (C++)
5616 expr_size-hook must not be given objects that are not
5617 supposed to be bit-copied or bit-initialized. */
5618 && expr_size (exp
) != const0_rtx
)
5620 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5622 if (GET_MODE (target
) == BLKmode
)
5624 /* Handle calls that return BLKmode values in registers. */
5625 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5626 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5628 store_bit_field (target
,
5629 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5630 0, 0, 0, GET_MODE (temp
), temp
, reverse
);
5633 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5636 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5638 /* Handle copying a string constant into an array. The string
5639 constant may be shorter than the array. So copy just the string's
5640 actual length, and clear the rest. First get the size of the data
5641 type of the string, which is actually the size of the target. */
5642 rtx size
= expr_size (exp
);
5644 if (CONST_INT_P (size
)
5645 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5646 emit_block_move (target
, temp
, size
,
5648 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5651 machine_mode pointer_mode
5652 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5653 machine_mode address_mode
= get_address_mode (target
);
5655 /* Compute the size of the data to copy from the string. */
5657 = size_binop_loc (loc
, MIN_EXPR
,
5658 make_tree (sizetype
, size
),
5659 size_int (TREE_STRING_LENGTH (exp
)));
5661 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5663 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5664 rtx_code_label
*label
= 0;
5666 /* Copy that much. */
5667 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5668 TYPE_UNSIGNED (sizetype
));
5669 emit_block_move (target
, temp
, copy_size_rtx
,
5671 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5673 /* Figure out how much is left in TARGET that we have to clear.
5674 Do all calculations in pointer_mode. */
5675 if (CONST_INT_P (copy_size_rtx
))
5677 size
= plus_constant (address_mode
, size
,
5678 -INTVAL (copy_size_rtx
));
5679 target
= adjust_address (target
, BLKmode
,
5680 INTVAL (copy_size_rtx
));
5684 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5685 copy_size_rtx
, NULL_RTX
, 0,
5688 if (GET_MODE (copy_size_rtx
) != address_mode
)
5689 copy_size_rtx
= convert_to_mode (address_mode
,
5691 TYPE_UNSIGNED (sizetype
));
5693 target
= offset_address (target
, copy_size_rtx
,
5694 highest_pow2_factor (copy_size
));
5695 label
= gen_label_rtx ();
5696 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5697 GET_MODE (size
), 0, label
);
5700 if (size
!= const0_rtx
)
5701 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5707 /* Handle calls that return values in multiple non-contiguous locations.
5708 The Irix 6 ABI has examples of this. */
5709 else if (GET_CODE (target
) == PARALLEL
)
5711 if (GET_CODE (temp
) == PARALLEL
)
5712 emit_group_move (target
, temp
);
5714 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5715 int_size_in_bytes (TREE_TYPE (exp
)));
5717 else if (GET_CODE (temp
) == PARALLEL
)
5718 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5719 int_size_in_bytes (TREE_TYPE (exp
)));
5720 else if (GET_MODE (temp
) == BLKmode
)
5721 emit_block_move (target
, temp
, expr_size (exp
),
5723 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5724 /* If we emit a nontemporal store, there is nothing else to do. */
5725 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5730 temp
= flip_storage_order (GET_MODE (target
), temp
);
5731 temp
= force_operand (temp
, target
);
5733 emit_move_insn (target
, temp
);
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
	    bool reverse)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
				 reverse, NULL);
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
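/* Examples (illustrative): for "int a[4]" this returns 4 either way; for
   "struct { int x; int y[2]; }" the !FOR_CTOR_P estimate is 1 + 2 = 3
   scalars, while the FOR_CTOR_P count is 2, since a complete constructor
   needs one top-level element per field.  */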
static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case ERROR_MARK:
      return 0;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
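/* Example (illustrative): for the initializer { 0, 5, 0, 7 } of an int[4],
   *P_NZ_ELTS would be 2 and *P_INIT_ELTS 4, and *P_COMPLETE would stay true
   because all four elements are given explicitly; the function itself would
   return true since every element is a constant.  */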
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos,
                         unsigned HOST_WIDE_INT bitregion_start,
                         unsigned HOST_WIDE_INT bitregion_end,
                         machine_mode mode,
                         tree exp, int cleared,
                         alias_set_type alias_set, bool reverse)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
                         reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
                 exp, alias_set, false, reverse);
}
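
/* For instance (informally, with invented numbers): storing the
   CONSTRUCTOR of a nested struct member that starts at bitpos 64 with
   bitsize 128 into a MEM target passes the byte-boundary checks above,
   so the value is handed straight back to store_constructor on the
   adjusted address; a 2-bit bitfield member at bitpos 3, by contrast,
   fails the bitpos % BITS_PER_UNIT test and is routed through
   store_field and its bit-field machinery instead.  */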
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      count++;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
                   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        tree field, value;

        /* The storage order is specified for every aggregate type.  */
        reverse = TYPE_REVERSE_STORAGE_ORDER (type);

        /* If size is zero or the target is already cleared, do nothing.  */
        if (size == 0 || cleared)
          cleared = 1;
        /* We either clear the aggregate or indicate the value is dead.  */
        else if ((TREE_CODE (type) == UNION_TYPE
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
                 && ! CONSTRUCTOR_ELTS (exp))
          /* If the constructor is empty, clear the union.  */
          {
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
            cleared = 1;
          }
        /* If we are building a static constructor into a register,
           set the initial value as zero so we can fold the value into
           a constant.  But if more than one register is involved,
           this probably loses.  */
        else if (REG_P (target) && TREE_STATIC (exp)
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
          {
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            cleared = 1;
          }
        /* If the constructor has fewer fields than the structure or
           if we are initializing the structure to mostly zeros, clear
           the whole structure first.  Don't do this if TARGET is a
           register whose mode size isn't equal to SIZE since
           clear_storage can't handle this case.  */
        else if (size > 0
                 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
                     || mostly_zeros_p (exp))
                 && (!REG_P (target)
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                         == size)))
          {
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (REG_P (target) && !cleared)
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding field of TARGET.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
          {
            machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos = 0;
            tree offset;
            rtx to_rtx = target;

            /* Just ignore missing fields.  We cleared the whole
               structure, above, if any fields are missing.  */
            if (field == 0)
              continue;

            if (cleared && initializer_zerop (value))
              continue;

            if (tree_fits_uhwi_p (DECL_SIZE (field)))
              bitsize = tree_to_uhwi (DECL_SIZE (field));

            mode = DECL_MODE (field);
            if (DECL_BIT_FIELD (field))
              mode = VOIDmode;

            offset = DECL_FIELD_OFFSET (field);
            if (tree_fits_shwi_p (offset)
                && tree_fits_shwi_p (bit_position (field)))
              bitpos = int_bit_position (field);

            /* If this initializes a field that is smaller than a
               word, at the start of a word, try to widen it to a full
               word.  This special case allows us to output C++ member
               function initializations in a form that the optimizers
               can understand.  */
            if (WORD_REGISTER_OPERATIONS
                && REG_P (target)
                && bitsize < BITS_PER_WORD
                && bitpos % BITS_PER_WORD == 0
                && GET_MODE_CLASS (mode) == MODE_INT
                && TREE_CODE (value) == INTEGER_CST
                && exp_size >= 0
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
              {
                tree type = TREE_TYPE (value);

                if (TYPE_PRECISION (type) < BITS_PER_WORD)
                  {
                    type = lang_hooks.types.type_for_mode
                      (word_mode, TYPE_UNSIGNED (type));
                    value = fold_convert (type, value);
                    /* Make sure the bits beyond the original bitsize are zero
                       so that we can correctly avoid extra zeroing stores in
                       later constructor elements.  */
                    tree bitsize_mask
                      = wide_int_to_tree (type, wi::mask (bitsize, false,
                                                          BITS_PER_WORD));
                    value = fold_build2 (BIT_AND_EXPR, type, value,
                                         bitsize_mask);
                  }

                if (BYTES_BIG_ENDIAN)
                  value
                    = fold_build2 (LSHIFT_EXPR, type, value,
                                   build_int_cst (type,
                                                  BITS_PER_WORD - bitsize));
                bitsize = BITS_PER_WORD;
                mode = word_mode;
              }

            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
                && DECL_NONADDRESSABLE_P (field))
              {
                to_rtx = copy_rtx (to_rtx);
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
              }

            store_constructor_field (to_rtx, bitsize, bitpos,
                                     0, bitregion_end, mode,
                                     value, cleared,
                                     get_alias_set (TREE_TYPE (field)),
                                     reverse);
          }
        break;
      }

    case ARRAY_TYPE:
      {
        tree value, index;
        unsigned HOST_WIDE_INT i;
        int need_to_clear;
        tree domain;
        tree elttype = TREE_TYPE (type);
        int const_bounds_p;
        HOST_WIDE_INT minelt = 0;
        HOST_WIDE_INT maxelt = 0;

        /* The storage order is specified for every aggregate type.  */
        reverse = TYPE_REVERSE_STORAGE_ORDER (type);

        domain = TYPE_DOMAIN (type);
        const_bounds_p = (TYPE_MIN_VALUE (domain)
                          && TYPE_MAX_VALUE (domain)
                          && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
                          && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

        /* If we have constant bounds for the range of the type, get them.  */
        if (const_bounds_p)
          {
            minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
            maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
          }

        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT idx;
            HOST_WIDE_INT count = 0, zero_count = 0;
            need_to_clear = ! const_bounds_p;

            /* This loop is a more accurate version of the loop in
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
               is also needed to check for missing elements.  */
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
              {
                HOST_WIDE_INT this_node_count;

                if (need_to_clear)
                  break;

                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                  {
                    tree lo_index = TREE_OPERAND (index, 0);
                    tree hi_index = TREE_OPERAND (index, 1);

                    if (! tree_fits_uhwi_p (lo_index)
                        || ! tree_fits_uhwi_p (hi_index))
                      {
                        need_to_clear = 1;
                        break;
                      }

                    this_node_count = (tree_to_uhwi (hi_index)
                                       - tree_to_uhwi (lo_index) + 1);
                  }
                else
                  this_node_count = 1;

                count += this_node_count;
                if (mostly_zeros_p (value))
                  zero_count += this_node_count;
              }

            /* Clear the entire array first if there are any missing
               elements, or if the incidence of zero elements is >=
               75%.  */
            if (! need_to_clear
                && (count < maxelt - minelt + 1
                    || 4 * zero_count >= 3 * count))
              need_to_clear = 1;
          }

        if (need_to_clear && size > 0)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding element of TARGET, determined by counting the
           elements.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
          {
            machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos;
            rtx xtarget = target;

            if (cleared && initializer_zerop (value))
              continue;

            mode = TYPE_MODE (elttype);
            if (mode == BLKmode)
              bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
                         ? tree_to_uhwi (TYPE_SIZE (elttype))
                         : -1);
            else
              bitsize = GET_MODE_BITSIZE (mode);

            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
              {
                tree lo_index = TREE_OPERAND (index, 0);
                tree hi_index = TREE_OPERAND (index, 1);
                rtx index_r, pos_rtx;
                HOST_WIDE_INT lo, hi, count;
                tree position;

                /* If the range is constant and "small", unroll the loop.  */
                if (const_bounds_p
                    && tree_fits_shwi_p (lo_index)
                    && tree_fits_shwi_p (hi_index)
                    && (lo = tree_to_shwi (lo_index),
                        hi = tree_to_shwi (hi_index),
                        count = hi - lo + 1,
                        (!MEM_P (target)
                         || count <= 2
                         || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
                             && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
                                 <= 40 * 8)))))
                  {
                    lo -= minelt;  hi -= minelt;
                    for (; lo <= hi; lo++)
                      {
                        bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

                        if (MEM_P (target)
                            && !MEM_KEEP_ALIAS_SET_P (target)
                            && TREE_CODE (type) == ARRAY_TYPE
                            && TYPE_NONALIASED_COMPONENT (type))
                          {
                            target = copy_rtx (target);
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
                          }

                        store_constructor_field
                          (target, bitsize, bitpos, 0, bitregion_end,
                           mode, value, cleared,
                           get_alias_set (elttype), reverse);
                      }
                  }
                else
                  {
                    rtx_code_label *loop_start = gen_label_rtx ();
                    rtx_code_label *loop_end = gen_label_rtx ();
                    tree exit_cond;

                    expand_normal (hi_index);

                    index = build_decl (EXPR_LOCATION (exp),
                                        VAR_DECL, NULL_TREE, domain);
                    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
                    SET_DECL_RTL (index, index_r);
                    store_expr (lo_index, index_r, 0, false, reverse);

                    /* Build the head of the loop.  */
                    do_pending_stack_adjust ();
                    emit_label (loop_start);

                    /* Assign value to element index.  */
                    position =
                      fold_convert (ssizetype,
                                    fold_build2 (MINUS_EXPR,
                                                 TREE_TYPE (index), index,
                                                 TYPE_MIN_VALUE (domain)));

                    position =
                      size_binop (MULT_EXPR, position,
                                  fold_convert (ssizetype,
                                                TYPE_SIZE_UNIT (elttype)));

                    pos_rtx = expand_normal (position);
                    xtarget = offset_address (target, pos_rtx,
                                              highest_pow2_factor (position));
                    xtarget = adjust_address (xtarget, mode, 0);
                    if (TREE_CODE (value) == CONSTRUCTOR)
                      store_constructor (value, xtarget, cleared,
                                         bitsize / BITS_PER_UNIT, reverse);
                    else
                      store_expr (value, xtarget, 0, false, reverse);

                    /* Generate a conditional jump to exit the loop.  */
                    exit_cond = build2 (LT_EXPR, integer_type_node,
                                        index, hi_index);
                    jumpif (exit_cond, loop_end,
                            profile_probability::uninitialized ());

                    /* Update the loop counter, and jump to the head of
                       the loop.  */
                    expand_assignment (index,
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
                                               index, integer_one_node),
                                       false);

                    emit_jump (loop_start);

                    /* Build the end of the loop.  */
                    emit_label (loop_end);
                  }
              }
            else if ((index != 0 && ! tree_fits_shwi_p (index))
                     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
              {
                tree position;

                if (index == 0)
                  index = ssize_int (1);

                if (minelt)
                  index = fold_convert (ssizetype,
                                        fold_build2 (MINUS_EXPR,
                                                     TREE_TYPE (index), index,
                                                     TYPE_MIN_VALUE (domain)));

                position =
                  size_binop (MULT_EXPR, index,
                              fold_convert (ssizetype,
                                            TYPE_SIZE_UNIT (elttype)));
                xtarget = offset_address (target,
                                          expand_normal (position),
                                          highest_pow2_factor (position));
                xtarget = adjust_address (xtarget, mode, 0);
                store_expr (value, xtarget, 0, false, reverse);
              }
            else
              {
                if (index != 0)
                  bitpos = ((tree_to_shwi (index) - minelt)
                            * tree_to_uhwi (TYPE_SIZE (elttype)));
                else
                  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                    && TREE_CODE (type) == ARRAY_TYPE
                    && TYPE_NONALIASED_COMPONENT (type))
                  {
                    target = copy_rtx (target);
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
                  }
                store_constructor_field (target, bitsize, bitpos, 0,
                                         bitregion_end, mode, value,
                                         cleared, get_alias_set (elttype),
                                         reverse);
              }
          }
        break;
      }

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        constructor_elt *ce;
        int i;
        int need_to_clear;
        int icode = CODE_FOR_nothing;
        tree elttype = TREE_TYPE (type);
        int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
        machine_mode eltmode = TYPE_MODE (elttype);
        HOST_WIDE_INT bitsize;
        HOST_WIDE_INT bitpos;
        rtvec vector = NULL;
        unsigned n_elts;
        alias_set_type alias;
        bool vec_vec_init_p = false;

        gcc_assert (eltmode != BLKmode);

        n_elts = TYPE_VECTOR_SUBPARTS (type);
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
          {
            machine_mode mode = GET_MODE (target);
            machine_mode emode = eltmode;

            if (CONSTRUCTOR_NELTS (exp)
                && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
                    == VECTOR_TYPE))
              {
                tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
                gcc_assert (CONSTRUCTOR_NELTS (exp)
                            * TYPE_VECTOR_SUBPARTS (etype)
                            == n_elts);
                emode = TYPE_MODE (etype);
              }
            icode = (int) convert_optab_handler (vec_init_optab, mode, emode);
            if (icode != CODE_FOR_nothing)
              {
                unsigned int i, n = n_elts;

                if (emode != eltmode)
                  {
                    n = CONSTRUCTOR_NELTS (exp);
                    vec_vec_init_p = true;
                  }
                vector = rtvec_alloc (n);
                for (i = 0; i < n; i++)
                  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
              }
          }

        /* If the constructor has fewer elements than the vector,
           clear the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
            tree value;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
              {
                tree sz = TYPE_SIZE (TREE_TYPE (value));
                int n_elts_here
                  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
                                                   TYPE_SIZE (elttype)));

                count += n_elts_here;
                if (mostly_zeros_p (value))
                  zero_count += n_elts_here;
              }

            /* Clear the entire vector first if there are any missing elements,
               or if the incidence of zero elements is >= 75%.  */
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
          }

        if (need_to_clear && size > 0 && !vector)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* Inform later passes that the old value is dead.  */
        if (!cleared && !vector && REG_P (target))
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

        if (MEM_P (target))
          alias = MEM_ALIAS_SET (target);
        else
          alias = get_alias_set (elttype);

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */
        for (idx = 0, i = 0;
             vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
             idx++, i += bitsize / elt_size)
          {
            HOST_WIDE_INT eltpos;
            tree value = ce->value;

            bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
            if (cleared && initializer_zerop (value))
              continue;

            if (ce->index)
              eltpos = tree_to_uhwi (ce->index);
            else
              eltpos = i;

            if (vector)
              {
                if (vec_vec_init_p)
                  {
                    gcc_assert (ce->index == NULL_TREE);
                    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
                    eltpos = idx;
                  }
                else
                  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
                RTVEC_ELT (vector, eltpos) = expand_normal (value);
              }
            else
              {
                machine_mode value_mode
                  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
                     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
                bitpos = eltpos * elt_size;
                store_constructor_field (target, bitsize, bitpos, 0,
                                         bitregion_end, value_mode,
                                         value, cleared, alias, reverse);
              }
          }

        if (vector)
          emit_insn (GEN_FCN (icode) (target,
                                      gen_rtx_PARALLEL (GET_MODE (target),
                                                        vector)));
        break;
      }

    default:
      gcc_unreachable ();
    }
}
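
/* An informal example of the RANGE_EXPR handling above, with made-up
   numbers: for

     int a[64] = { [4 ... 7] = v };

   the range covers 4 elements of 32 bits each, i.e. 4 * 32 bits, which
   is below the 40 * 8 bit unrolling threshold, so the loop is unrolled
   into four store_constructor_field calls at bit positions 128, 160,
   192 and 224.  A range such as [0 ... 1023] exceeds the threshold and
   is instead emitted as a real runtime loop with an index pseudo and
   the loop_start / loop_end labels built above.  */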
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             unsigned HOST_WIDE_INT bitregion_start,
             unsigned HOST_WIDE_INT bitregion_end,
             machine_mode mode, tree exp,
             alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal, reverse);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
          && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
          /* Except for initialization of full bytes from a CONSTRUCTOR, which
             we will handle specially below.  */
          && !(TREE_CODE (exp) == CONSTRUCTOR
               && bitsize % BITS_PER_UNIT == 0)
          /* And except for bitwise copying of TREE_ADDRESSABLE types,
             where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
             includes some extra padding.  store_expr / expand_expr will in
             that case call get_inner_reference that will have the bitsize
             we check here and thus the block move will not clobber the
             padding that shouldn't be clobbered.  In the future we could
             replace the TREE_ADDRESSABLE check with a check that
             get_base_address needs to live in memory.  */
          && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
              || TREE_CODE (exp) != COMPONENT_REF
              || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
              || (bitsize % BITS_PER_UNIT != 0)
              || (bitpos % BITS_PER_UNIT != 0)
              || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
                  != 0)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
         decl we must use bitfield operations.  */
      || (bitsize >= 0
          && TREE_CODE (exp) == MEM_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
         implies a mask operation.  If the precision is the same size as
         the field we're storing into, that mask is redundant.  This is
         particularly common with bit field assignments generated by the
         C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
        {
          tree type = TREE_TYPE (exp);
          if (INTEGRAL_TYPE_P (type)
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
              && bitsize == TYPE_PRECISION (type))
            {
              tree op = gimple_assign_rhs1 (nop_def);
              type = TREE_TYPE (op);
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
                exp = op;
            }
        }

      temp = expand_normal (exp);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
        {
          HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
          scalar_int_mode temp_mode
            = smallest_int_mode_for_size (size * BITS_PER_UNIT);
          rtx temp_target = gen_reg_rtx (temp_mode);
          emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
          temp = temp_target;
        }

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
        {
          rtx temp_target = gen_reg_rtx (GET_MODE (temp));
          copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
          temp = temp_target;
        }

      /* If the value has aggregate type and an integral mode then, if BITSIZE
         is narrower than this mode and this is for big-endian data, we first
         need to put the value into the low-order bits for store_bit_field,
         except when MODE is BLKmode and BITSIZE larger than the word size
         (see the handling of fields larger than a word in store_bit_field).
         Moreover, the field may be not aligned on a byte boundary; in this
         case, if it has reverse storage order, it needs to be accessed as a
         scalar field with reverse storage order and we must first put the
         value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
          && is_int_mode (GET_MODE (temp), &temp_mode))
        {
          HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

          reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

          if (reverse)
            temp = flip_storage_order (temp_mode, temp);

          if (bitsize < size
              && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
              && !(mode == BLKmode && bitsize > BITS_PER_WORD))
            temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
                                 size - bitsize, NULL_RTX, 1);
        }

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
         and BITPOS must be aligned on a byte boundary.  If so, we simply do
         a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
          && (GET_MODE (target) == BLKmode
              || (MEM_P (target)
                  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
                  && (bitpos % BITS_PER_UNIT) == 0
                  && (bitsize % BITS_PER_UNIT) == 0)))
        {
          gcc_assert (MEM_P (target) && MEM_P (temp)
                      && (bitpos % BITS_PER_UNIT) == 0);

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return const0_rtx;
        }

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
         word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
        {
          scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
          temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
                                    temp_mode, false, NULL);
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
                       bitregion_start, bitregion_end,
                       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
         into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
        {
          gcc_assert (bitsize % BITS_PER_UNIT == 0);
          store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
          return const0_rtx;
        }

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     machine_mode *pmode, int *punsignedp,
                     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
          && TREE_THIS_VOLATILE (exp)
          && DECL_BIT_FIELD_TYPE (field)
          && DECL_MODE (field) != BLKmode)
        /* Volatile bitfields should be accessed in the mode of the
           field's type, not the mode computed based on the bit
           size.  */
        mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
        mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
        blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
                     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
         inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
        mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            /* If this field hasn't been filled in yet, don't go past it.
               This should only happen when folding expressions made during
               type construction.  */
            if (this_offset == 0)
              break;

            offset = size_binop (PLUS_EXPR, offset, this_offset);
            bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound = array_ref_low_bound (exp);
            tree unit_size = array_ref_element_size (exp);

            /* We assume all arrays have sizes that are a multiple of a byte.
               First subtract the lower bound, if any, in the type of the
               index, then convert to sizetype and multiply by the size of
               the array element.  */
            if (! integer_zerop (low_bound))
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                   index, low_bound);

            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (MULT_EXPR,
                                             fold_convert (sizetype, index),
                                             unit_size));
          }
          break;

        case IMAGPART_EXPR:
          bit_offset += *pbitsize;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
            {
              tree off = TREE_OPERAND (exp, 1);
              if (!integer_zerop (off))
                {
                  offset_int boff, coff = mem_ref_offset (exp);
                  boff = coff << LOG2_BITS_PER_UNIT;
                  bit_offset += boff;
                }
              exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
            }
          goto done;

        default:
          goto done;
        }

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
                                 TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
        {
          *pbitpos = tem.to_shwi ();
          *poffset = offset = NULL_TREE;
        }
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
        {
          offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
          offset_int tem = bit_offset.and_not (mask);
          /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
             Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
          bit_offset -= tem;
          tem >>= LOG2_BITS_PER_UNIT;
          offset = size_binop (PLUS_EXPR, offset,
                               wide_int_to_tree (sizetype, tem));
        }

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
        = simplify_gen_subreg (GET_MODE (value),
                               force_reg (GET_MODE (SUBREG_REG (value)),
                                          force_operand (SUBREG_REG (value),
                                                         NULL_RTX)),
                               GET_MODE (SUBREG_REG (value)),
                               SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
        subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
        {
          code = PLUS;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
          && GET_CODE (XEXP (value, 0)) == PLUS
          && REG_P (XEXP (XEXP (value, 0), 0))
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_simple_binop (GET_MODE (value), code,
                                          XEXP (XEXP (value, 0), 0), op2,
                                          subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_simple_binop (GET_MODE (value), code, temp,
                                      force_operand (XEXP (XEXP (value,
                                                                 0), 1), 0),
                                      target, 0, OPTAB_LIB_WIDEN);
        }

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
        {
        case MULT:
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
        case DIV:
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                        target, 1, OPTAB_LIB_WIDEN);
          return expand_divmod (0,
                                FLOAT_MODE_P (GET_MODE (value))
                                ? RDIV_EXPR : TRUNC_DIV_EXPR,
                                GET_MODE (value), op1, op2, target, 0);
        case MOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 0);
        case UDIV:
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case UMOD:
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
                                target, 1);
        case ASHIFTRT:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 0, OPTAB_LIB_WIDEN);
        default:
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
                                      target, 1, OPTAB_LIB_WIDEN);
        }
    }

  if (UNARY_P (value))
    {
      if (!target)
        target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
        {
        case ZERO_EXTEND:
        case SIGN_EXTEND:
        case TRUNCATE:
        case FLOAT_EXTEND:
        case FLOAT_TRUNCATE:
          convert_move (target, op1, code == ZERO_EXTEND);
          return target;

        case FIX:
        case UNSIGNED_FIX:
          expand_fix (target, op1, code == UNSIGNED_FIX);
          return target;

        case FLOAT:
        case UNSIGNED_FLOAT:
          expand_float (target, op1, code == UNSIGNED_FLOAT);
          return target;

        default:
          return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
        }
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
                             force_reg (GET_MODE (SUBREG_REG (value)),
                                        force_operand (SUBREG_REG (value),
                                                       NULL_RTX)),
                             GET_MODE (SUBREG_REG (value)),
                             SUBREG_BYTE (value));
#endif

  return value;
}
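
/* For example (informally): calling force_operand on

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (const_int 8))

   first forces the MULT operand, emitting a multiply or shift into a
   fresh pseudo, and then emits the addition, so the caller receives a
   single pseudo holding the sum rather than the composite rtx.  */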
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
        {
          while (1)
            {
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
                return 0;
              exp = TREE_CHAIN (exp);
              if (!exp)
                return 1;
              if (TREE_CODE (exp) != TREE_LIST)
                return safe_from_p (x, exp, 0);
            }
        }
      else if (TREE_CODE (exp) == CONSTRUCTOR)
        {
          constructor_elt *ce;
          unsigned HOST_WIDE_INT idx;

          FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
            if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
                || !safe_from_p (x, ce->value, 0))
              return 0;
          return 1;
        }
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;	/* An already-visited SAVE_EXPR? */
      else
        return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
         DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
        return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL a that address if part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || !MEM_P (DECL_RTL (exp)))
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case MEM_REF:
          if (MEM_P (x)
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || MEM_P (x))
            return 0;
          break;

        case WITH_CLEANUP_EXPR:
        case CLEANUP_POINT_EXPR:
          /* Lowered by gimplify.c.  */
          gcc_unreachable ();

        case SAVE_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      break;

    default:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (REG_P (exp_rtl)
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
                    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
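
/* Worked example: if EXP is  (long) i * 24  then tree_ctz reports 3
   known trailing zero bits, so the value is a multiple of 8 and the
   function returns 8.  If EXP is the constant 0, tree_ctz reports as
   many zero bits as the precision allows and the result is capped at
   BIGGEST_ALIGNMENT.  */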
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;

  switch (tcode)
    {
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
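
/* For instance (informal): expanding  a + a  lets the operand_equal_p
   shortcut above expand A once and reuse a copy of the rtx for the
   second operand, while  a + f ()  fails safe_from_p when TARGET could
   be clobbered by evaluating the second operand, so TARGET is dropped
   and both operands are expanded into independent pseudos before the
   caller combines them.  */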
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
                         enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at top level.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
        tree tem = TREE_OPERAND (exp, 0);
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
          tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
        return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
                                           0, modifier), 0);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
         the address is the same as the address of the parent object.  */
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
         The expression is therefore always offset by the size of the
         scalar type.  */
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
         initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
         with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
         array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
         the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
          && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
        return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
                                        target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
         expand_expr, as that can have various side effects; LABEL_DECLs for
         example, may not have their DECL_RTL set yet.  Expand the rtl of
         CONSTRUCTORs too, which should yield a memory reference for the
         constructor's contents.  Assume language specific tree nodes can
         be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
          || TREE_CODE (exp) == CONSTRUCTOR
          || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
        {
          result = expand_expr (exp, target, tmode,
                                modifier == EXPAND_INITIALIZER
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

          /* If the DECL isn't in memory, then the DECL wasn't properly
             marked TREE_ADDRESSABLE, which will be either a front-end
             or a tree optimizer bug.  */

          gcc_assert (MEM_P (result));
          result = XEXP (result, 0);

          /* ??? Is this needed anymore?  */
          if (DECL_P (exp))
            TREE_USED (exp) = 1;

          if (modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM)
            result = force_operand (result, target);
          return result;
        }

      /* Pass FALSE as the last argument to get_inner_reference although
         we are expanding to RTL.  The rationale is that we know how to
         handle "aligning nodes" here: we can just bypass them because
         they won't change the final object whose address will be returned
         (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
        result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
                         modifier == EXPAND_INITIALIZER
                         ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
         than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
        tmp = convert_modes (tmode, GET_MODE (tmp),
                             tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
        result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
        {
          subtarget = bitpos ? NULL_RTX : target;
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
                                        1, OPTAB_LIB_WIDEN);
        }
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
         of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
        result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
                       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  scalar_int_mode new_tmode = (tmode == pointer_mode
                               ? pointer_mode
                               : address_mode);

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    new_tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
                    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
            && ! (target != 0 && safe_from_p (target, exp, 1)))
           || TREE_ADDRESSABLE (exp)
           || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
               && (! can_move_by_pieces
                   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
                    TYPE_ALIGN (type)))
               && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
          && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
        return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
        return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
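
/* Informally: a TREE_STATIC constructor that is entirely zero, stored
   into a BLKmode memory TARGET, takes the all_zeros_p shortcut above
   and is emitted as a single clear_storage call.  Small non-static
   aggregates with constant elements are usually materialized field by
   field into TARGET through store_constructor, and only large or
   addressable constant initializers fall back to the constant pool via
   expand_expr_constant.  */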
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
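/* As an illustrative sketch (an assumed usage pattern, not a quote from the
   callers themselves): because TARGET and TMODE are only suggestions, a
   typical caller copes with a different rtx coming back roughly like

     rtx temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
     if (temp != target)
       emit_move_insn (target, temp);

   so a copy is emitted only when the expander chose some other register,
   memory, or constant instead of TARGET.  */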
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
8101 rtx op00
, op01
, op1
, op2
;
8102 enum rtx_code comparison_code
;
8103 machine_mode comparison_mode
;
8106 tree type
= TREE_TYPE (treeop1
);
8107 int unsignedp
= TYPE_UNSIGNED (type
);
8108 machine_mode mode
= TYPE_MODE (type
);
8109 machine_mode orig_mode
= mode
;
8110 static bool expanding_cond_expr_using_cmove
= false;
  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions, can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
8117 if (expanding_cond_expr_using_cmove
)
8120 /* If we cannot do a conditional move on the mode, try doing it
8121 with the promoted mode. */
8122 if (!can_conditionally_move_p (mode
))
8124 mode
= promote_mode (type
, mode
, &unsignedp
);
8125 if (!can_conditionally_move_p (mode
))
8127 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
8130 temp
= assign_temp (type
, 0, 1);
8132 expanding_cond_expr_using_cmove
= true;
8134 expand_operands (treeop1
, treeop2
,
8135 temp
, &op1
, &op2
, EXPAND_NORMAL
);
8137 if (TREE_CODE (treeop0
) == SSA_NAME
8138 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
8140 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
8141 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
8142 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
8143 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
8144 comparison_mode
= TYPE_MODE (type
);
8145 unsignedp
= TYPE_UNSIGNED (type
);
8146 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8148 else if (COMPARISON_CLASS_P (treeop0
))
8150 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
8151 enum tree_code cmpcode
= TREE_CODE (treeop0
);
8152 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
8153 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
8154 unsignedp
= TYPE_UNSIGNED (type
);
8155 comparison_mode
= TYPE_MODE (type
);
8156 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8160 op00
= expand_normal (treeop0
);
8162 comparison_code
= NE
;
8163 comparison_mode
= GET_MODE (op00
);
8164 if (comparison_mode
== VOIDmode
)
8165 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8167 expanding_cond_expr_using_cmove
= false;
8169 if (GET_MODE (op1
) != mode
)
8170 op1
= gen_lowpart (mode
, op1
);
8172 if (GET_MODE (op2
) != mode
)
8173 op2
= gen_lowpart (mode
, op2
);
8175 /* Try to emit the conditional move. */
8176 insn
= emit_conditional_move (temp
, comparison_code
,
8177 op00
, op01
, comparison_mode
,
8181 /* If we could do the conditional move, emit the sequence,
8185 rtx_insn
*seq
= get_insns ();
8188 return convert_modes (orig_mode
, mode
, temp
, 0);
8191 /* Otherwise discard the sequence and fall back to code with
rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
		    enum expand_modifier modifier)
{
8201 rtx op0
, op1
, op2
, temp
;
8202 rtx_code_label
*lab
;
8206 scalar_int_mode int_mode
;
8207 enum tree_code code
= ops
->code
;
8209 rtx subtarget
, original_target
;
8211 bool reduce_bit_field
;
8212 location_t loc
= ops
->location
;
8213 tree treeop0
, treeop1
, treeop2
;
8214 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8215 ? reduce_to_bit_field_precision ((expr), \
8221 mode
= TYPE_MODE (type
);
8222 unsignedp
= TYPE_UNSIGNED (type
);
8228 /* We should be called only on simple (binary or unary) expressions,
8229 exactly those that are valid in gimple expressions that aren't
8230 GIMPLE_SINGLE_RHS (or invalid). */
8231 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8232 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8233 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8235 ignore
= (target
== const0_rtx
8236 || ((CONVERT_EXPR_CODE_P (code
)
8237 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8238 && TREE_CODE (type
) == VOID_TYPE
));
8240 /* We should be called only if we need the result. */
8241 gcc_assert (!ignore
);
8243 /* An operation in what may be a bit-field type needs the
8244 result to be reduced to the precision of the bit-field type,
8245 which is narrower than that of the type's mode. */
8246 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8247 && !type_has_mode_precision_p (type
));
8249 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8252 /* Use subtarget as the target for operand 0 of a binary operation. */
8253 subtarget
= get_subtarget (target
);
8254 original_target
= target
;
8258 case NON_LVALUE_EXPR
:
8261 if (treeop0
== error_mark_node
)
8264 if (TREE_CODE (type
) == UNION_TYPE
)
8266 tree valtype
= TREE_TYPE (treeop0
);
8268 /* If both input and output are BLKmode, this conversion isn't doing
8269 anything except possibly changing memory attribute. */
8270 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8272 rtx result
= expand_expr (treeop0
, target
, tmode
,
8275 result
= copy_rtx (result
);
8276 set_mem_attributes (result
, type
, 0);
8282 if (TYPE_MODE (type
) != BLKmode
)
8283 target
= gen_reg_rtx (TYPE_MODE (type
));
8285 target
= assign_temp (type
, 1, 1);
8289 /* Store data into beginning of memory target. */
8290 store_expr (treeop0
,
8291 adjust_address (target
, TYPE_MODE (valtype
), 0),
8292 modifier
== EXPAND_STACK_PARM
,
8293 false, TYPE_REVERSE_STORAGE_ORDER (type
));
8297 gcc_assert (REG_P (target
)
8298 && !TYPE_REVERSE_STORAGE_ORDER (type
));
8300 /* Store this field into a union of the proper type. */
8301 store_field (target
,
8302 MIN ((int_size_in_bytes (TREE_TYPE
8305 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8306 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0,
8310 /* Return the entire union. */
8314 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8316 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8319 /* If the signedness of the conversion differs and OP0 is
8320 a promoted SUBREG, clear that indication since we now
8321 have to do the proper extension. */
8322 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8323 && GET_CODE (op0
) == SUBREG
)
8324 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8326 return REDUCE_BIT_FIELD (op0
);
8329 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8330 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8331 if (GET_MODE (op0
) == mode
)
8334 /* If OP0 is a constant, just convert it into the proper mode. */
8335 else if (CONSTANT_P (op0
))
8337 tree inner_type
= TREE_TYPE (treeop0
);
8338 machine_mode inner_mode
= GET_MODE (op0
);
8340 if (inner_mode
== VOIDmode
)
8341 inner_mode
= TYPE_MODE (inner_type
);
8343 if (modifier
== EXPAND_INITIALIZER
)
8344 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
8346 op0
= convert_modes (mode
, inner_mode
, op0
,
8347 TYPE_UNSIGNED (inner_type
));
8350 else if (modifier
== EXPAND_INITIALIZER
)
8351 op0
= gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8352 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8354 else if (target
== 0)
8355 op0
= convert_to_mode (mode
, op0
,
8356 TYPE_UNSIGNED (TREE_TYPE
8360 convert_move (target
, op0
,
8361 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8365 return REDUCE_BIT_FIELD (op0
);
8367 case ADDR_SPACE_CONVERT_EXPR
:
8369 tree treeop0_type
= TREE_TYPE (treeop0
);
8371 gcc_assert (POINTER_TYPE_P (type
));
8372 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8374 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8375 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8377 /* Conversions between pointers to the same address space should
8378 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8379 gcc_assert (as_to
!= as_from
);
8381 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8383 /* Ask target code to handle conversion between pointers
8384 to overlapping address spaces. */
8385 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8386 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8388 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8392 /* For disjoint address spaces, converting anything but a null
8393 pointer invokes undefined behavior. We truncate or extend the
8394 value as if we'd converted via integers, which handles 0 as
8395 required, and all others as the programmer likely expects. */
8396 #ifndef POINTERS_EXTEND_UNSIGNED
8397 const int POINTERS_EXTEND_UNSIGNED
= 1;
8399 op0
= convert_modes (mode
, TYPE_MODE (treeop0_type
),
8400 op0
, POINTERS_EXTEND_UNSIGNED
);
8406 case POINTER_PLUS_EXPR
:
8407 /* Even though the sizetype mode and the pointer's mode can be different
8408 expand is able to handle this correctly and get the correct result out
8409 of the PLUS_EXPR code. */
8410 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8411 if sizetype precision is smaller than pointer precision. */
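      /* Illustrative example on a hypothetical target: with a 16-bit
	 sizetype and 32-bit pointers, an offset of (sizetype) -2 == 0xfffe
	 must be sign-extended via ssizetype before being widened to the
	 pointer type, so that P + (sizetype) -2 addresses P - 2 rather
	 than P + 0xfffe.  */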
8412 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
8413 treeop1
= fold_convert_loc (loc
, type
,
8414 fold_convert_loc (loc
, ssizetype
,
8416 /* If sizetype precision is larger than pointer precision, truncate the
8417 offset to have matching modes. */
8418 else if (TYPE_PRECISION (sizetype
) > TYPE_PRECISION (type
))
8419 treeop1
= fold_convert_loc (loc
, type
, treeop1
);
8423 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8424 something else, make sure we add the register to the constant and
8425 then to the other thing. This case can occur during strength
8426 reduction and doing it this way will produce better code if the
8427 frame pointer or argument pointer is eliminated.
8429 fold-const.c will ensure that the constant is always in the inner
8430 PLUS_EXPR, so the only case we need to do anything about is if
8431 sp, ap, or fp is our second argument, in which case we must swap
8432 the innermost first argument and our second argument. */
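      /* Illustrative example: for a sum of the form (A + 4) + FP, where FP
	 is the frame pointer, the swap below effectively turns it into
	 (FP + 4) + A, so the register+constant part can survive
	 frame-pointer or argument-pointer elimination as a single offset.  */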
8434 if (TREE_CODE (treeop0
) == PLUS_EXPR
8435 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8437 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8438 || DECL_RTL (treeop1
) == stack_pointer_rtx
8439 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8444 /* If the result is to be ptr_mode and we are adding an integer to
8445 something, we might be forming a constant. So try to use
8446 plus_constant. If it produces a sum and we can't accept it,
8447 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid address.
8451 If this is an EXPAND_SUM call, always return the sum. */
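      /* Illustrative example: for P = &ARR[10] with 4-byte elements this
	 path can return (plus (symbol_ref ARR) (const_int 40)) directly,
	 falling back to force_operand only when such an address is not
	 legitimate on the target.  */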
8452 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8453 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8455 if (modifier
== EXPAND_STACK_PARM
)
8457 if (TREE_CODE (treeop0
) == INTEGER_CST
8458 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8459 && TREE_CONSTANT (treeop1
))
8463 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8465 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8467 /* Use wi::shwi to ensure that the constant is
8468 truncated according to the mode of OP1, then sign extended
8469 to a HOST_WIDE_INT. Using the constant directly can result
8470 in non-canonical RTL in a 64x32 cross compile. */
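	    /* Illustrative example: on a 64-bit host targeting a 32-bit
	       machine, the SImode constant 0x80000000 must become the
	       sign-extended HOST_WIDE_INT 0xffffffff80000000 inside the
	       CONST_INT; wi::shwi performs exactly that truncation and
	       sign extension.  */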
8471 wc
= TREE_INT_CST_LOW (treeop0
);
8473 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8474 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8475 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8476 op1
= force_operand (op1
, target
);
8477 return REDUCE_BIT_FIELD (op1
);
8480 else if (TREE_CODE (treeop1
) == INTEGER_CST
8481 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8482 && TREE_CONSTANT (treeop0
))
8486 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8488 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8489 (modifier
== EXPAND_INITIALIZER
8490 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8491 if (! CONSTANT_P (op0
))
8493 op1
= expand_expr (treeop1
, NULL_RTX
,
8494 VOIDmode
, modifier
);
8495 /* Return a PLUS if modifier says it's OK. */
8496 if (modifier
== EXPAND_SUM
8497 || modifier
== EXPAND_INITIALIZER
)
8498 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8501 /* Use wi::shwi to ensure that the constant is
8502 truncated according to the mode of OP1, then sign extended
8503 to a HOST_WIDE_INT. Using the constant directly can result
8504 in non-canonical RTL in a 64x32 cross compile. */
8505 wc
= TREE_INT_CST_LOW (treeop1
);
8507 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8508 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8509 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8510 op0
= force_operand (op0
, target
);
8511 return REDUCE_BIT_FIELD (op0
);
8515 /* Use TER to expand pointer addition of a negated value
8516 as pointer subtraction. */
8517 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8518 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8519 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8520 && TREE_CODE (treeop1
) == SSA_NAME
8521 && TYPE_MODE (TREE_TYPE (treeop0
))
8522 == TYPE_MODE (TREE_TYPE (treeop1
)))
8524 gimple
*def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8527 treeop1
= gimple_assign_rhs1 (def
);
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8537 if (modifier
!= EXPAND_INITIALIZER
8538 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8540 expand_operands (treeop0
, treeop1
,
8541 subtarget
, &op0
, &op1
, modifier
);
8542 if (op0
== const0_rtx
)
8544 if (op1
== const0_rtx
)
8549 expand_operands (treeop0
, treeop1
,
8550 subtarget
, &op0
, &op1
, modifier
);
8551 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
8560 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8561 && really_constant_p (treeop0
)
8562 && really_constant_p (treeop1
))
8564 expand_operands (treeop0
, treeop1
,
8565 NULL_RTX
, &op0
, &op1
, modifier
);
8567 /* If the last operand is a CONST_INT, use plus_constant of
8568 the negated constant. Else make the MINUS. */
8569 if (CONST_INT_P (op1
))
8570 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8573 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8580 if (modifier
!= EXPAND_INITIALIZER
8581 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8584 expand_operands (treeop0
, treeop1
,
8585 subtarget
, &op0
, &op1
, modifier
);
8587 /* Convert A - const to A + (-const). */
8588 if (CONST_INT_P (op1
))
8590 op1
= negate_rtx (mode
, op1
);
8591 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8596 case WIDEN_MULT_PLUS_EXPR
:
8597 case WIDEN_MULT_MINUS_EXPR
:
8598 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8599 op2
= expand_normal (treeop2
);
8600 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8604 case WIDEN_MULT_EXPR
:
8605 /* If first operand is constant, swap them.
8606 Thus the following special case checks need only
8607 check the second operand. */
8608 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8609 std::swap (treeop0
, treeop1
);
8611 /* First, check if we have a multiplication of one signed and one
8612 unsigned operand. */
8613 if (TREE_CODE (treeop1
) != INTEGER_CST
8614 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8615 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8617 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8618 this_optab
= usmul_widen_optab
;
8619 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8620 != CODE_FOR_nothing
)
8622 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8623 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8626 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8628 /* op0 and op1 might still be constant, despite the above
8629 != INTEGER_CST check. Handle it. */
8630 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8632 op0
= convert_modes (innermode
, mode
, op0
, true);
8633 op1
= convert_modes (innermode
, mode
, op1
, false);
8634 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8635 target
, unsignedp
));
8640 /* Check for a multiplication with matching signedness. */
8641 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8642 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8643 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8644 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8646 tree op0type
= TREE_TYPE (treeop0
);
8647 machine_mode innermode
= TYPE_MODE (op0type
);
8648 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8649 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8650 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8652 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8654 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8655 != CODE_FOR_nothing
)
8657 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8659 /* op0 and op1 might still be constant, despite the above
8660 != INTEGER_CST check. Handle it. */
8661 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8664 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8666 = convert_modes (innermode
, mode
, op1
,
8667 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8668 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8672 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8673 unsignedp
, this_optab
);
8674 return REDUCE_BIT_FIELD (temp
);
8676 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8678 && innermode
== word_mode
)
8681 op0
= expand_normal (treeop0
);
8682 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8683 op1
= convert_modes (word_mode
, mode
,
8684 expand_normal (treeop1
),
8685 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8687 op1
= expand_normal (treeop1
);
8688 /* op0 and op1 might still be constant, despite the above
8689 != INTEGER_CST check. Handle it. */
8690 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8691 goto widen_mult_const
;
8692 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8693 unsignedp
, OPTAB_LIB_WIDEN
);
8694 hipart
= gen_highpart (word_mode
, temp
);
8695 htem
= expand_mult_highpart_adjust (word_mode
, hipart
,
8699 emit_move_insn (hipart
, htem
);
8700 return REDUCE_BIT_FIELD (temp
);
8704 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8705 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8706 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8707 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8711 optab opt
= fma_optab
;
8712 gimple
*def0
, *def2
;
	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   function call.  */
8716 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8718 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8721 gcc_assert (fn
!= NULL_TREE
);
8722 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8723 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8726 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8727 /* The multiplication is commutative - look at its 2nd operand
8728 if the first isn't fed by a negate. */
8731 def0
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8732 /* Swap operands if the 2nd operand is fed by a negate. */
8734 std::swap (treeop0
, treeop1
);
8736 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8741 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8744 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8745 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8748 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8751 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8754 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8757 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8761 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8763 op2
= expand_normal (treeop2
);
8764 op1
= expand_normal (treeop1
);
8766 return expand_ternary_op (TYPE_MODE (type
), opt
,
8767 op0
, op1
, op2
, target
, 0);
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
8774 if (ALL_FIXED_POINT_MODE_P (mode
))
8777 /* If first operand is constant, swap them.
8778 Thus the following special case checks need only
8779 check the second operand. */
8780 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8781 std::swap (treeop0
, treeop1
);
8783 /* Attempt to return something suitable for generating an
8784 indexed address, for machines that support that. */
8786 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8787 && tree_fits_shwi_p (treeop1
))
8789 tree exp1
= treeop1
;
8791 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8795 op0
= force_operand (op0
, NULL_RTX
);
8797 op0
= copy_to_mode_reg (mode
, op0
);
8799 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8800 gen_int_mode (tree_to_shwi (exp1
),
8801 TYPE_MODE (TREE_TYPE (exp1
)))));
8804 if (modifier
== EXPAND_STACK_PARM
)
8807 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8808 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8810 case TRUNC_MOD_EXPR
:
8811 case FLOOR_MOD_EXPR
:
8813 case ROUND_MOD_EXPR
:
8815 case TRUNC_DIV_EXPR
:
8816 case FLOOR_DIV_EXPR
:
8818 case ROUND_DIV_EXPR
:
8819 case EXACT_DIV_EXPR
:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
8824 if (ALL_FIXED_POINT_MODE_P (mode
))
8827 if (modifier
== EXPAND_STACK_PARM
)
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible by it.  */
8832 expand_operands (treeop0
, treeop1
,
8833 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8834 bool mod_p
= code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
8835 || code
== CEIL_MOD_EXPR
|| code
== ROUND_MOD_EXPR
;
8836 if (SCALAR_INT_MODE_P (mode
)
8838 && get_range_pos_neg (treeop0
) == 1
8839 && get_range_pos_neg (treeop1
) == 1)
8841 /* If both arguments are known to be positive when interpreted
8842 as signed, we can expand it as both signed and unsigned
8843 division or modulo. Choose the cheaper sequence in that case. */
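	  /* Illustrative example: for x / 8 with x known non-negative, the
	     unsigned sequence is a single logical right shift, while the
	     signed sequence needs extra fixup instructions for negative
	     dividends, so the unsigned variant usually wins the cost
	     comparison below.  */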
8844 bool speed_p
= optimize_insn_for_speed_p ();
8845 do_pending_stack_adjust ();
8847 rtx uns_ret
= expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, 1);
8848 rtx_insn
*uns_insns
= get_insns ();
8851 rtx sgn_ret
= expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, 0);
8852 rtx_insn
*sgn_insns
= get_insns ();
8854 unsigned uns_cost
= seq_cost (uns_insns
, speed_p
);
8855 unsigned sgn_cost
= seq_cost (sgn_insns
, speed_p
);
	  /* If the costs are the same, use the other metric (size when
	     optimizing for speed, and vice versa) as a tie breaker.  */
8859 if (uns_cost
== sgn_cost
)
8861 uns_cost
= seq_cost (uns_insns
, !speed_p
);
8862 sgn_cost
= seq_cost (sgn_insns
, !speed_p
);
8865 if (uns_cost
< sgn_cost
|| (uns_cost
== sgn_cost
&& unsignedp
))
8867 emit_insn (uns_insns
);
8870 emit_insn (sgn_insns
);
8873 return expand_divmod (mod_p
, code
, mode
, op0
, op1
, target
, unsignedp
);
8878 case MULT_HIGHPART_EXPR
:
8879 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8880 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
8884 case FIXED_CONVERT_EXPR
:
8885 op0
= expand_normal (treeop0
);
8886 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8887 target
= gen_reg_rtx (mode
);
8889 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
8890 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8891 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
8892 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
8894 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
8897 case FIX_TRUNC_EXPR
:
8898 op0
= expand_normal (treeop0
);
8899 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8900 target
= gen_reg_rtx (mode
);
8901 expand_fix (target
, op0
, unsignedp
);
8905 op0
= expand_normal (treeop0
);
8906 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8907 target
= gen_reg_rtx (mode
);
8908 /* expand_float can't figure out what to do if FROM has VOIDmode.
8909 So give it the correct mode. With -O, cse will optimize this. */
8910 if (GET_MODE (op0
) == VOIDmode
)
8911 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
8913 expand_float (target
, op0
,
8914 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8918 op0
= expand_expr (treeop0
, subtarget
,
8919 VOIDmode
, EXPAND_NORMAL
);
8920 if (modifier
== EXPAND_STACK_PARM
)
8922 temp
= expand_unop (mode
,
8923 optab_for_tree_code (NEGATE_EXPR
, type
,
8927 return REDUCE_BIT_FIELD (temp
);
8930 op0
= expand_expr (treeop0
, subtarget
,
8931 VOIDmode
, EXPAND_NORMAL
);
8932 if (modifier
== EXPAND_STACK_PARM
)
8935 /* ABS_EXPR is not valid for complex arguments. */
8936 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8937 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8939 /* Unsigned abs is simply the operand. Testing here means we don't
8940 risk generating incorrect code below. */
8941 if (TYPE_UNSIGNED (type
))
8944 return expand_abs (mode
, op0
, target
, unsignedp
,
8945 safe_from_p (target
, treeop0
, 1));
8949 target
= original_target
;
8951 || modifier
== EXPAND_STACK_PARM
8952 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8953 || GET_MODE (target
) != mode
8955 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8956 target
= gen_reg_rtx (mode
);
8957 expand_operands (treeop0
, treeop1
,
8958 target
, &op0
, &op1
, EXPAND_NORMAL
);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
8963 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8964 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8969 /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
8970 and similarly for MAX <x, y>. */
8971 if (VECTOR_TYPE_P (type
))
8973 tree t0
= make_tree (type
, op0
);
8974 tree t1
= make_tree (type
, op1
);
8975 tree comparison
= build2 (code
== MIN_EXPR
? LE_EXPR
: GE_EXPR
,
8977 return expand_vec_cond_expr (type
, comparison
, t0
, t1
,
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8984 if (! REG_P (target
))
8985 target
= gen_reg_rtx (mode
);
8987 /* If op1 was placed in target, swap op0 and op1. */
8988 if (target
!= op0
&& target
== op1
)
8989 std::swap (op0
, op1
);
8991 /* We generate better code and avoid problems with op1 mentioning
8992 target by forcing op1 into a pseudo if it isn't a constant. */
8993 if (! CONSTANT_P (op1
))
8994 op1
= force_reg (mode
, op1
);
8997 enum rtx_code comparison_code
;
9000 if (code
== MAX_EXPR
)
9001 comparison_code
= unsignedp
? GEU
: GE
;
9003 comparison_code
= unsignedp
? LEU
: LE
;
9005 /* Canonicalize to comparisons against 0. */
9006 if (op1
== const1_rtx
)
9008 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9009 or (a != 0 ? a : 1) for unsigned.
9010 For MIN we are safe converting (a <= 1 ? a : 1)
9011 into (a <= 0 ? a : 1) */
9012 cmpop1
= const0_rtx
;
9013 if (code
== MAX_EXPR
)
9014 comparison_code
= unsignedp
? NE
: GT
;
9016 if (op1
== constm1_rtx
&& !unsignedp
)
9018 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9019 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9020 cmpop1
= const0_rtx
;
9021 if (code
== MIN_EXPR
)
9022 comparison_code
= LT
;
9025 /* Use a conditional move if possible. */
9026 if (can_conditionally_move_p (mode
))
9032 /* Try to emit the conditional move. */
9033 insn
= emit_conditional_move (target
, comparison_code
,
9038 /* If we could do the conditional move, emit the sequence,
9042 rtx_insn
*seq
= get_insns ();
9048 /* Otherwise discard the sequence and fall back to code with
9054 emit_move_insn (target
, op0
);
9056 lab
= gen_label_rtx ();
9057 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
9058 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
9059 profile_probability::uninitialized ());
9061 emit_move_insn (target
, op1
);
9066 op0
= expand_expr (treeop0
, subtarget
,
9067 VOIDmode
, EXPAND_NORMAL
);
9068 if (modifier
== EXPAND_STACK_PARM
)
      /* In case we have to reduce the result to bitfield precision
	 for an unsigned bitfield, expand this as XOR with a proper constant
	 instead.  */
9073 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
9075 int_mode
= SCALAR_INT_TYPE_MODE (type
);
9076 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
9077 false, GET_MODE_PRECISION (int_mode
));
9079 temp
= expand_binop (int_mode
, xor_optab
, op0
,
9080 immed_wide_int_const (mask
, int_mode
),
9081 target
, 1, OPTAB_LIB_WIDEN
);
9084 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
9088 /* ??? Can optimize bitwise operations with one arg constant.
9089 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9090 and (a bitwise1 b) bitwise2 b (etc)
9091 but that is probably not worth while. */
9100 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
9101 || type_has_mode_precision_p (type
));
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
9110 if (ALL_FIXED_POINT_MODE_P (mode
))
9113 if (! safe_from_p (subtarget
, treeop1
, 1))
9115 if (modifier
== EXPAND_STACK_PARM
)
9117 op0
= expand_expr (treeop0
, subtarget
,
9118 VOIDmode
, EXPAND_NORMAL
);
      /* Left shift optimization when shifting across word_size boundary.

	 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	 there isn't a native instruction to support this wide-mode
	 left shift.  Given the scenario below:

	    Type A = (Type) B << C

	    |<           T           >|
	    | dest_high  |  dest_low  |

			 | word_size  |

	 If the shift amount C causes part of B to be shifted across the
	 word size boundary, i.e. part of B is shifted into the high half
	 of the destination register and part of B remains in the low
	 half, then GCC will use the following left shift expand
	 logic:

	 1. Initialize dest_low to B.
	 2. Initialize every bit of dest_high to the sign bit of B.
	 3. Logically left shift dest_low by C bits to finalize dest_low.
	    The value of dest_low before this shift is kept in a temp D.
	 4. Logically left shift dest_high by C.
	 5. Logically right shift D by (word_size - C).
	 6. Or the results of 4 and 5 to finalize dest_high.

	 However, by checking gimple statements, if operand B is
	 coming from a sign extension, then we can simplify the above
	 expand logic into:

	      1. dest_high = src_low >> (word_size - C).
	      2. dest_low = src_low << C.

	 One arithmetic right shift accomplishes all of steps 2, 4, 5
	 and 6, so we reduce the number of steps needed from 6 to 2.

	 The case is similar for zero extension, except that we
	 initialize dest_high to zero rather than to copies of the sign
	 bit from B.  Furthermore, we need to use a logical right shift
	 in this case.

	 The choice of sign-extension versus zero-extension is
	 determined entirely by whether or not B is signed and is
	 independent of the current setting of unsignedp.  */
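      /* Illustrative worked example (assuming 64-bit word_mode and a 128-bit
	 result): for A = (TImode) B << 40, where B was sign-extended from a
	 DImode value src_low, the simplified expansion is

	   dest_high = src_low >> (64 - 40);   /- arithmetic shift -/
	   dest_low  = src_low << 40;

	 which matches the six-step sequence above because the discarded
	 high word of B consists purely of copies of its sign bit.  */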
9168 if (code
== LSHIFT_EXPR
9171 && GET_MODE_2XWIDER_MODE (word_mode
).exists (&int_mode
)
9173 && TREE_CONSTANT (treeop1
)
9174 && TREE_CODE (treeop0
) == SSA_NAME
)
9176 gimple
*def
= SSA_NAME_DEF_STMT (treeop0
);
9177 if (is_gimple_assign (def
)
9178 && gimple_assign_rhs_code (def
) == NOP_EXPR
)
9180 scalar_int_mode rmode
= SCALAR_INT_TYPE_MODE
9181 (TREE_TYPE (gimple_assign_rhs1 (def
)));
9183 if (GET_MODE_SIZE (rmode
) < GET_MODE_SIZE (int_mode
)
9184 && TREE_INT_CST_LOW (treeop1
) < GET_MODE_BITSIZE (word_mode
)
9185 && ((TREE_INT_CST_LOW (treeop1
) + GET_MODE_BITSIZE (rmode
))
9186 >= GET_MODE_BITSIZE (word_mode
)))
9188 rtx_insn
*seq
, *seq_old
;
9189 unsigned int high_off
= subreg_highpart_offset (word_mode
,
9191 bool extend_unsigned
9192 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def
)));
9193 rtx low
= lowpart_subreg (word_mode
, op0
, int_mode
);
9194 rtx dest_low
= lowpart_subreg (word_mode
, target
, int_mode
);
9195 rtx dest_high
= simplify_gen_subreg (word_mode
, target
,
9196 int_mode
, high_off
);
9197 HOST_WIDE_INT ramount
= (BITS_PER_WORD
9198 - TREE_INT_CST_LOW (treeop1
));
9199 tree rshift
= build_int_cst (TREE_TYPE (treeop1
), ramount
);
9202 /* dest_high = src_low >> (word_size - C). */
9203 temp
= expand_variable_shift (RSHIFT_EXPR
, word_mode
, low
,
9206 if (temp
!= dest_high
)
9207 emit_move_insn (dest_high
, temp
);
9209 /* dest_low = src_low << C. */
9210 temp
= expand_variable_shift (LSHIFT_EXPR
, word_mode
, low
,
9211 treeop1
, dest_low
, unsignedp
);
9212 if (temp
!= dest_low
)
9213 emit_move_insn (dest_low
, temp
);
9219 if (have_insn_for (ASHIFT
, int_mode
))
9221 bool speed_p
= optimize_insn_for_speed_p ();
9223 rtx ret_old
= expand_variable_shift (code
, int_mode
,
9228 seq_old
= get_insns ();
9230 if (seq_cost (seq
, speed_p
)
9231 >= seq_cost (seq_old
, speed_p
))
9242 if (temp
== NULL_RTX
)
9243 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
9245 if (code
== LSHIFT_EXPR
)
9246 temp
= REDUCE_BIT_FIELD (temp
);
9250 /* Could determine the answer when only additive constants differ. Also,
9251 the addition of one can be handled by changing the condition. */
9258 case UNORDERED_EXPR
:
9267 temp
= do_store_flag (ops
,
9268 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
9269 tmode
!= VOIDmode
? tmode
: mode
);
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if have_canonicalize_funcptr_for_compare.  */
9277 || modifier
== EXPAND_STACK_PARM
9278 || ! safe_from_p (target
, treeop0
, 1)
9279 || ! safe_from_p (target
, treeop1
, 1)
9280 /* Make sure we don't have a hard reg (such as function's return
9281 value) live across basic blocks, if not optimizing. */
9282 || (!optimize
&& REG_P (target
)
9283 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
9284 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
9286 emit_move_insn (target
, const0_rtx
);
9288 rtx_code_label
*lab1
= gen_label_rtx ();
9289 jumpifnot_1 (code
, treeop0
, treeop1
, lab1
,
9290 profile_probability::uninitialized ());
9292 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
9293 emit_move_insn (target
, constm1_rtx
);
9295 emit_move_insn (target
, const1_rtx
);
9301 /* Get the rtx code of the operands. */
9302 op0
= expand_normal (treeop0
);
9303 op1
= expand_normal (treeop1
);
9306 target
= gen_reg_rtx (TYPE_MODE (type
));
9308 /* If target overlaps with op1, then either we need to force
9309 op1 into a pseudo (if target also overlaps with op0),
9310 or write the complex parts in reverse order. */
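      /* Illustrative example: if target is (concat:DC r100 r101) and op1
	 (the imaginary part) was expanded into r100, writing the real part
	 first would clobber op1; the cases below either copy op1 into a
	 fresh pseudo or store the imaginary part before the real one.  */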
9311 switch (GET_CODE (target
))
9314 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
9316 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
9318 complex_expr_force_op1
:
9319 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
9320 emit_move_insn (temp
, op1
);
9324 complex_expr_swap_order
:
9325 /* Move the imaginary (op1) and real (op0) parts to their
9327 write_complex_part (target
, op1
, true);
9328 write_complex_part (target
, op0
, false);
9334 temp
= adjust_address_nv (target
,
9335 GET_MODE_INNER (GET_MODE (target
)), 0);
9336 if (reg_overlap_mentioned_p (temp
, op1
))
9338 machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
9339 temp
= adjust_address_nv (target
, imode
,
9340 GET_MODE_SIZE (imode
));
9341 if (reg_overlap_mentioned_p (temp
, op0
))
9342 goto complex_expr_force_op1
;
9343 goto complex_expr_swap_order
;
9347 if (reg_overlap_mentioned_p (target
, op1
))
9349 if (reg_overlap_mentioned_p (target
, op0
))
9350 goto complex_expr_force_op1
;
9351 goto complex_expr_swap_order
;
9356 /* Move the real (op0) and imaginary (op1) parts to their location. */
9357 write_complex_part (target
, op0
, false);
9358 write_complex_part (target
, op1
, true);
9362 case WIDEN_SUM_EXPR
:
9364 tree oprnd0
= treeop0
;
9365 tree oprnd1
= treeop1
;
9367 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9368 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9373 case REDUC_MAX_EXPR
:
9374 case REDUC_MIN_EXPR
:
9375 case REDUC_PLUS_EXPR
:
9377 op0
= expand_normal (treeop0
);
9378 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9379 machine_mode vec_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9381 struct expand_operand ops
[2];
9382 enum insn_code icode
= optab_handler (this_optab
, vec_mode
);
9384 create_output_operand (&ops
[0], target
, mode
);
9385 create_input_operand (&ops
[1], op0
, vec_mode
);
9386 expand_insn (icode
, 2, ops
);
9387 target
= ops
[0].value
;
9388 if (GET_MODE (target
) != mode
)
9389 return gen_lowpart (tmode
, target
);
9393 case VEC_UNPACK_HI_EXPR
:
9394 case VEC_UNPACK_LO_EXPR
:
9396 op0
= expand_normal (treeop0
);
9397 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9403 case VEC_UNPACK_FLOAT_HI_EXPR
:
9404 case VEC_UNPACK_FLOAT_LO_EXPR
:
9406 op0
= expand_normal (treeop0
);
9407 /* The signedness is determined from input operand. */
9408 temp
= expand_widen_pattern_expr
9409 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9410 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9416 case VEC_WIDEN_MULT_HI_EXPR
:
9417 case VEC_WIDEN_MULT_LO_EXPR
:
9418 case VEC_WIDEN_MULT_EVEN_EXPR
:
9419 case VEC_WIDEN_MULT_ODD_EXPR
:
9420 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9421 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9422 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9423 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9425 gcc_assert (target
);
9428 case VEC_PACK_TRUNC_EXPR
:
9429 case VEC_PACK_SAT_EXPR
:
9430 case VEC_PACK_FIX_TRUNC_EXPR
:
9431 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9435 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9436 op2
= expand_normal (treeop2
);
9438 /* Careful here: if the target doesn't support integral vector modes,
9439 a constant selection vector could wind up smooshed into a normal
9440 integral constant. */
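      /* Illustrative example on a hypothetical target: a V4QImode selector
	 such as {0, 1, 4, 5} may have been folded into a plain 32-bit
	 integer constant if V4QImode is not supported; the code below
	 rebuilds a CONST_VECTOR in an integral vector mode so that
	 expand_vec_perm sees a proper permutation vector.  */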
9441 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9443 tree sel_type
= TREE_TYPE (treeop2
);
9445 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9446 TYPE_VECTOR_SUBPARTS (sel_type
));
9447 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9448 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9449 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9452 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9454 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9460 tree oprnd0
= treeop0
;
9461 tree oprnd1
= treeop1
;
9462 tree oprnd2
= treeop2
;
9465 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9466 op2
= expand_normal (oprnd2
);
9467 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9474 tree oprnd0
= treeop0
;
9475 tree oprnd1
= treeop1
;
9476 tree oprnd2
= treeop2
;
9479 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9480 op2
= expand_normal (oprnd2
);
9481 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9486 case REALIGN_LOAD_EXPR
:
9488 tree oprnd0
= treeop0
;
9489 tree oprnd1
= treeop1
;
9490 tree oprnd2
= treeop2
;
9493 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9494 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9495 op2
= expand_normal (oprnd2
);
9496 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9504 /* A COND_EXPR with its type being VOID_TYPE represents a
9505 conditional jump and is handled in
9506 expand_gimple_cond_expr. */
9507 gcc_assert (!VOID_TYPE_P (type
));
9509 /* Note that COND_EXPRs whose type is a structure or union
9510 are required to be constructed to contain assignments of
9511 a temporary variable, so that we can evaluate them here
9512 for side effect only. If type is void, we must do likewise. */
9514 gcc_assert (!TREE_ADDRESSABLE (type
)
9516 && TREE_TYPE (treeop1
) != void_type_node
9517 && TREE_TYPE (treeop2
) != void_type_node
);
9519 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */
9528 if (modifier
!= EXPAND_STACK_PARM
9530 && safe_from_p (original_target
, treeop0
, 1)
9531 && GET_MODE (original_target
) == mode
9532 && !MEM_P (original_target
))
9533 temp
= original_target
;
9535 temp
= assign_temp (type
, 0, 1);
9537 do_pending_stack_adjust ();
9539 rtx_code_label
*lab0
= gen_label_rtx ();
9540 rtx_code_label
*lab1
= gen_label_rtx ();
9541 jumpifnot (treeop0
, lab0
,
9542 profile_probability::uninitialized ());
9543 store_expr (treeop1
, temp
,
9544 modifier
== EXPAND_STACK_PARM
,
9547 emit_jump_insn (targetm
.gen_jump (lab1
));
9550 store_expr (treeop2
, temp
,
9551 modifier
== EXPAND_STACK_PARM
,
9560 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9563 case BIT_INSERT_EXPR
:
9565 unsigned bitpos
= tree_to_uhwi (treeop2
);
9567 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1
)))
9568 bitsize
= TYPE_PRECISION (TREE_TYPE (treeop1
));
9570 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1
)));
9571 rtx op0
= expand_normal (treeop0
);
9572 rtx op1
= expand_normal (treeop1
);
9573 rtx dst
= gen_reg_rtx (mode
);
9574 emit_move_insn (dst
, op0
);
9575 store_bit_field (dst
, bitsize
, bitpos
, 0, 0,
9576 TYPE_MODE (TREE_TYPE (treeop1
)), op1
, false);
9584 /* Here to do an ordinary binary operator. */
9586 expand_operands (treeop0
, treeop1
,
9587 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9589 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9591 if (modifier
== EXPAND_STACK_PARM
)
9593 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9594 unsignedp
, OPTAB_LIB_WIDEN
);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated already.  */
9598 if (code
== BIT_XOR_EXPR
9599 || code
== BIT_AND_EXPR
9600 || code
== BIT_IOR_EXPR
)
9602 return REDUCE_BIT_FIELD (temp
);
9604 #undef REDUCE_BIT_FIELD
9607 /* Return TRUE if expression STMT is suitable for replacement.
9608 Never consider memory loads as replaceable, because those don't ever lead
9609 into constant expressions. */
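/* Illustrative example: a statement such as _1 = x_2(D) + 1 can be TERed
   into its single use and expanded there, whereas a load like _1 = *p_3(D)
   is rejected by the check below so that the memory access is not moved
   past other side effects.  */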
static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
9632 machine_mode mode
, dmode
;
9633 enum tree_code code
= TREE_CODE (exp
);
9634 rtx subtarget
, original_target
;
9637 bool reduce_bit_field
;
9638 location_t loc
= EXPR_LOCATION (exp
);
9639 struct separate_ops ops
;
9640 tree treeop0
, treeop1
, treeop2
;
9641 tree ssa_name
= NULL_TREE
;
9644 type
= TREE_TYPE (exp
);
9645 mode
= TYPE_MODE (type
);
9646 unsignedp
= TYPE_UNSIGNED (type
);
9648 treeop0
= treeop1
= treeop2
= NULL_TREE
;
9649 if (!VL_EXP_CLASS_P (exp
))
9650 switch (TREE_CODE_LENGTH (code
))
9653 case 3: treeop2
= TREE_OPERAND (exp
, 2); /* FALLTHRU */
9654 case 2: treeop1
= TREE_OPERAND (exp
, 1); /* FALLTHRU */
9655 case 1: treeop0
= TREE_OPERAND (exp
, 0); /* FALLTHRU */
9665 ignore
= (target
== const0_rtx
9666 || ((CONVERT_EXPR_CODE_P (code
)
9667 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9668 && TREE_CODE (type
) == VOID_TYPE
));
9670 /* An operation in what may be a bit-field type needs the
9671 result to be reduced to the precision of the bit-field type,
9672 which is narrower than that of the type's mode. */
9673 reduce_bit_field
= (!ignore
9674 && INTEGRAL_TYPE_P (type
)
9675 && !type_has_mode_precision_p (type
));
9677 /* If we are going to ignore this result, we need only do something
9678 if there is a side-effect somewhere in the expression. If there
9679 is, short-circuit the most common cases here. Note that we must
9680 not call expand_expr with anything but const0_rtx in case this
9681 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9685 if (! TREE_SIDE_EFFECTS (exp
))
9688 /* Ensure we reference a volatile object even if value is ignored, but
9689 don't do this if all we are doing is taking its address. */
9690 if (TREE_THIS_VOLATILE (exp
)
9691 && TREE_CODE (exp
) != FUNCTION_DECL
9692 && mode
!= VOIDmode
&& mode
!= BLKmode
9693 && modifier
!= EXPAND_CONST_ADDRESS
)
9695 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9701 if (TREE_CODE_CLASS (code
) == tcc_unary
9702 || code
== BIT_FIELD_REF
9703 || code
== COMPONENT_REF
9704 || code
== INDIRECT_REF
)
9705 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9708 else if (TREE_CODE_CLASS (code
) == tcc_binary
9709 || TREE_CODE_CLASS (code
) == tcc_comparison
9710 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9712 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9713 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9720 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9723 /* Use subtarget as the target for operand 0 of a binary operation. */
9724 subtarget
= get_subtarget (target
);
9725 original_target
= target
;
9731 tree function
= decl_function_context (exp
);
9733 temp
= label_rtx (exp
);
9734 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9736 if (function
!= current_function_decl
9738 LABEL_REF_NONLOCAL_P (temp
) = 1;
9740 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9745 /* ??? ivopts calls expander, without any preparation from
9746 out-of-ssa. So fake instructions as if this was an access to the
9747 base variable. This unnecessarily allocates a pseudo, see how we can
9748 reuse it, if partition base vars have it set already. */
9749 if (!currently_expanding_to_rtl
)
9751 tree var
= SSA_NAME_VAR (exp
);
9752 if (var
&& DECL_RTL_SET_P (var
))
9753 return DECL_RTL (var
);
9754 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9755 LAST_VIRTUAL_REGISTER
+ 1);
9758 g
= get_gimple_for_ssa_name (exp
);
9759 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9761 && modifier
== EXPAND_INITIALIZER
9762 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9763 && (optimize
|| !SSA_NAME_VAR (exp
)
9764 || DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9765 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9766 g
= SSA_NAME_DEF_STMT (exp
);
9770 location_t saved_loc
= curr_insn_location ();
9771 location_t loc
= gimple_location (g
);
9772 if (loc
!= UNKNOWN_LOCATION
)
9773 set_curr_insn_location (loc
);
9774 ops
.code
= gimple_assign_rhs_code (g
);
9775 switch (get_gimple_rhs_class (ops
.code
))
9777 case GIMPLE_TERNARY_RHS
:
9778 ops
.op2
= gimple_assign_rhs3 (g
);
9780 case GIMPLE_BINARY_RHS
:
9781 ops
.op1
= gimple_assign_rhs2 (g
);
	  /* Try to expand conditional compare.  */
9784 if (targetm
.gen_ccmp_first
)
9786 gcc_checking_assert (targetm
.gen_ccmp_next
!= NULL
);
9787 r
= expand_ccmp_expr (g
, mode
);
9792 case GIMPLE_UNARY_RHS
:
9793 ops
.op0
= gimple_assign_rhs1 (g
);
9794 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9796 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9798 case GIMPLE_SINGLE_RHS
:
9800 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9801 tmode
, modifier
, alt_rtl
,
9808 set_curr_insn_location (saved_loc
);
9809 if (REG_P (r
) && !REG_EXPR (r
))
9810 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9815 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9816 exp
= SSA_NAME_VAR (ssa_name
);
9817 goto expand_decl_rtl
;
9821 /* If a static var's type was incomplete when the decl was written,
9822 but the type is complete now, lay out the decl now. */
9823 if (DECL_SIZE (exp
) == 0
9824 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9825 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9826 layout_decl (exp
, 0);
9832 decl_rtl
= DECL_RTL (exp
);
9834 gcc_assert (decl_rtl
);
9836 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9837 settings for VECTOR_TYPE_P that might switch for the function. */
9838 if (currently_expanding_to_rtl
9839 && code
== VAR_DECL
&& MEM_P (decl_rtl
)
9840 && VECTOR_TYPE_P (type
) && exp
&& DECL_MODE (exp
) != mode
)
9841 decl_rtl
= change_address (decl_rtl
, TYPE_MODE (type
), 0);
9843 decl_rtl
= copy_rtx (decl_rtl
);
9845 /* Record writes to register variables. */
9846 if (modifier
== EXPAND_WRITE
9848 && HARD_REGISTER_P (decl_rtl
))
9849 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9850 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
9856 TREE_USED (exp
) = 1;
9858 /* Show we haven't gotten RTL for this yet. */
9861 /* Variables inherited from containing functions should have
9862 been lowered by this point. */
9864 context
= decl_function_context (exp
);
9866 || SCOPE_FILE_SCOPE_P (context
)
9867 || context
== current_function_decl
9868 || TREE_STATIC (exp
)
9869 || DECL_EXTERNAL (exp
)
9870 /* ??? C++ creates functions that are not TREE_STATIC. */
9871 || TREE_CODE (exp
) == FUNCTION_DECL
);
9873 /* This is the case of an array whose size is to be determined
9874 from its initializer, while the initializer is still being parsed.
9875 ??? We aren't parsing while expanding anymore. */
9877 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9878 temp
= validize_mem (decl_rtl
);
9880 /* If DECL_RTL is memory, we are in the normal case and the
9881 address is not valid, get the address into a register. */
9883 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9886 *alt_rtl
= decl_rtl
;
9887 decl_rtl
= use_anchored_address (decl_rtl
);
9888 if (modifier
!= EXPAND_CONST_ADDRESS
9889 && modifier
!= EXPAND_SUM
9890 && !memory_address_addr_space_p (exp
? DECL_MODE (exp
)
9891 : GET_MODE (decl_rtl
),
9893 MEM_ADDR_SPACE (decl_rtl
)))
9894 temp
= replace_equiv_address (decl_rtl
,
9895 copy_rtx (XEXP (decl_rtl
, 0)));
9898 /* If we got something, return it. But first, set the alignment
9899 if the address is a register. */
9902 if (exp
&& MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9903 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9909 dmode
= DECL_MODE (exp
);
9911 dmode
= TYPE_MODE (TREE_TYPE (ssa_name
));
9913 /* If the mode of DECL_RTL does not match that of the decl,
9914 there are two cases: we are dealing with a BLKmode value
9915 that is returned in a register, or we are dealing with
9916 a promoted value. In the latter case, return a SUBREG
9917 of the wanted mode, but mark it so that we know that it
9918 was already extended. */
9919 if (REG_P (decl_rtl
)
9921 && GET_MODE (decl_rtl
) != dmode
)
9925 /* Get the signedness to be used for this variable. Ensure we get
9926 the same mode we got when the variable was declared. */
9927 if (code
!= SSA_NAME
)
9928 pmode
= promote_decl_mode (exp
, &unsignedp
);
9929 else if ((g
= SSA_NAME_DEF_STMT (ssa_name
))
9930 && gimple_code (g
) == GIMPLE_CALL
9931 && !gimple_call_internal_p (g
))
9932 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9933 gimple_call_fntype (g
),
9936 pmode
= promote_ssa_mode (ssa_name
, &unsignedp
);
9937 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9939 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9940 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9941 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9949 /* Given that TYPE_PRECISION (type) is not always equal to
9950 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	 the former to the latter according to the signedness of the
	 type.  */
9953 scalar_int_mode mode
= SCALAR_INT_TYPE_MODE (type
);
9954 temp
= immed_wide_int_const
9955 (wi::to_wide (exp
, GET_MODE_PRECISION (mode
)), mode
);
9961 tree tmp
= NULL_TREE
;
9962 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9963 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9964 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9965 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9966 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9967 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9968 return const_vector_from_tree (exp
);
9969 scalar_int_mode int_mode
;
9970 if (is_int_mode (mode
, &int_mode
))
9972 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp
)))
9973 return const_scalar_mask_from_tree (int_mode
, exp
);
9977 = lang_hooks
.types
.type_for_mode (int_mode
, 1);
9979 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
,
9980 type_for_mode
, exp
);
9985 vec
<constructor_elt
, va_gc
> *v
;
9987 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9988 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9989 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9990 tmp
= build_constructor (type
, v
);
9992 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9997 if (modifier
== EXPAND_WRITE
)
9999 /* Writing into CONST_DECL is always invalid, but handle it
10001 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
10002 scalar_int_mode address_mode
= targetm
.addr_space
.address_mode (as
);
10003 op0
= expand_expr_addr_expr_1 (exp
, NULL_RTX
, address_mode
,
10004 EXPAND_NORMAL
, as
);
10005 op0
= memory_address_addr_space (mode
, op0
, as
);
10006 temp
= gen_rtx_MEM (mode
, op0
);
10007 set_mem_addr_space (temp
, as
);
10010 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
10013 /* If optimized, generate immediate CONST_DOUBLE
10014 which will be turned into memory by reload if necessary.
10016 We used to force a register so that loop.c could see it. But
10017 this does not allow gen_* patterns to perform optimizations with
10018 the constants. It also produces two insns in cases like "x = 1.0;".
10019 On most machines, floating-point constants are not permitted in
10020 many insns, so we'd end up copying it to a register in any case.
10022 Now, we do the copying in expand_binop, if appropriate. */
10023 return const_double_from_real_value (TREE_REAL_CST (exp
),
10024 TYPE_MODE (TREE_TYPE (exp
)));
10027 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
10028 TYPE_MODE (TREE_TYPE (exp
)));
10031 /* Handle evaluating a complex constant in a CONCAT target. */
10032 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
10034 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
10037 rtarg
= XEXP (original_target
, 0);
10038 itarg
= XEXP (original_target
, 1);
10040 /* Move the real and imaginary parts separately. */
10041 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
10042 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
10045 emit_move_insn (rtarg
, op0
);
10047 emit_move_insn (itarg
, op1
);
10049 return original_target
;
10055 temp
= expand_expr_constant (exp
, 1, modifier
);
10057 /* temp contains a constant address.
10058 On RISC machines where a constant address isn't valid,
10059 make some insns to get that address into a register. */
10060 if (modifier
!= EXPAND_CONST_ADDRESS
10061 && modifier
!= EXPAND_INITIALIZER
10062 && modifier
!= EXPAND_SUM
10063 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
10064 MEM_ADDR_SPACE (temp
)))
10065 return replace_equiv_address (temp
,
10066 copy_rtx (XEXP (temp
, 0)));
10071 tree val
= treeop0
;
10072 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
10073 inner_reference_p
);
10075 if (!SAVE_EXPR_RESOLVED_P (exp
))
10077 /* We can indeed still hit this case, typically via builtin
10078 expanders calling save_expr immediately before expanding
10079 something. Assume this means that we only have to deal
10080 with non-BLKmode values. */
10081 gcc_assert (GET_MODE (ret
) != BLKmode
);
10083 val
= build_decl (curr_insn_location (),
10084 VAR_DECL
, NULL
, TREE_TYPE (exp
));
10085 DECL_ARTIFICIAL (val
) = 1;
10086 DECL_IGNORED_P (val
) = 1;
10088 TREE_OPERAND (exp
, 0) = treeop0
;
10089 SAVE_EXPR_RESOLVED_P (exp
) = 1;
10091 if (!CONSTANT_P (ret
))
10092 ret
= copy_to_reg (ret
);
10093 SET_DECL_RTL (val
, ret
);
10101 /* If we don't need the result, just ensure we evaluate any
10105 unsigned HOST_WIDE_INT idx
;
10108 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
10109 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10114 return expand_constructor (exp
, target
, modifier
, false);
10116 case TARGET_MEM_REF
:
10119 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
10120 enum insn_code icode
;
10121 unsigned int align
;
10123 op0
= addr_for_mem_ref (exp
, as
, true);
10124 op0
= memory_address_addr_space (mode
, op0
, as
);
10125 temp
= gen_rtx_MEM (mode
, op0
);
10126 set_mem_attributes (temp
, exp
, 0);
10127 set_mem_addr_space (temp
, as
);
10128 align
= get_object_alignment (exp
);
10129 if (modifier
!= EXPAND_WRITE
10130 && modifier
!= EXPAND_MEMORY
10132 && align
< GET_MODE_ALIGNMENT (mode
)
10133 /* If the target does not have special handling for unaligned
10134 loads of mode then it can use regular moves for them. */
10135 && ((icode
= optab_handler (movmisalign_optab
, mode
))
10136 != CODE_FOR_nothing
))
10138 struct expand_operand ops
[2];
10140 /* We've already validated the memory, and we're creating a
10141 new pseudo destination. The predicates really can't fail,
10142 nor can the generator. */
10143 create_output_operand (&ops
[0], NULL_RTX
, mode
);
10144 create_fixed_operand (&ops
[1], temp
);
10145 expand_insn (icode
, 2, ops
);
10146 temp
= ops
[0].value
;
10153 const bool reverse
= REF_REVERSE_STORAGE_ORDER (exp
);
10155 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
10156 machine_mode address_mode
;
10157 tree base
= TREE_OPERAND (exp
, 0);
10159 enum insn_code icode
;
10161 /* Handle expansion of non-aliased memory with non-BLKmode. That
10162 might end up in a register. */
10163 if (mem_ref_refers_to_non_mem_p (exp
))
10165 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
10166 base
= TREE_OPERAND (base
, 0);
10169 && tree_fits_uhwi_p (TYPE_SIZE (type
))
10170 && (GET_MODE_BITSIZE (DECL_MODE (base
))
10171 == tree_to_uhwi (TYPE_SIZE (type
))))
10172 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
10173 target
, tmode
, modifier
);
10174 if (TYPE_MODE (type
) == BLKmode
)
10176 temp
= assign_stack_temp (DECL_MODE (base
),
10177 GET_MODE_SIZE (DECL_MODE (base
)));
10178 store_expr (base
, temp
, 0, false, false);
10179 temp
= adjust_address (temp
, BLKmode
, offset
);
10180 set_mem_size (temp
, int_size_in_bytes (type
));
10183 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
10184 bitsize_int (offset
* BITS_PER_UNIT
));
10185 REF_REVERSE_STORAGE_ORDER (exp
) = reverse
;
10186 return expand_expr (exp
, target
, tmode
, modifier
);
10188 address_mode
= targetm
.addr_space
.address_mode (as
);
10189 base
= TREE_OPERAND (exp
, 0);
10190 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
10192 tree mask
= gimple_assign_rhs2 (def_stmt
);
10193 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
10194 gimple_assign_rhs1 (def_stmt
), mask
);
10195 TREE_OPERAND (exp
, 0) = base
;
10197 align
= get_object_alignment (exp
);
10198 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
10199 op0
= memory_address_addr_space (mode
, op0
, as
);
10200 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
10202 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
10203 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
10204 op0
= memory_address_addr_space (mode
, op0
, as
);
10206 temp
= gen_rtx_MEM (mode
, op0
);
10207 set_mem_attributes (temp
, exp
, 0);
10208 set_mem_addr_space (temp
, as
);
10209 if (TREE_THIS_VOLATILE (exp
))
10210 MEM_VOLATILE_P (temp
) = 1;
10211 if (modifier
!= EXPAND_WRITE
10212 && modifier
!= EXPAND_MEMORY
10213 && !inner_reference_p
10215 && align
< GET_MODE_ALIGNMENT (mode
))
10217 if ((icode
= optab_handler (movmisalign_optab
, mode
))
10218 != CODE_FOR_nothing
)
10220 struct expand_operand ops
[2];
10222 /* We've already validated the memory, and we're creating a
10223 new pseudo destination. The predicates really can't fail,
10224 nor can the generator. */
10225 create_output_operand (&ops
[0], NULL_RTX
, mode
);
10226 create_fixed_operand (&ops
[1], temp
);
10227 expand_insn (icode
, 2, ops
);
10228 temp
= ops
[0].value
;
10230 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
10231 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
10232 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
10233 (modifier
== EXPAND_STACK_PARM
10234 ? NULL_RTX
: target
),
10235 mode
, mode
, false, alt_rtl
);
10238 && modifier
!= EXPAND_MEMORY
10239 && modifier
!= EXPAND_WRITE
)
10240 temp
= flip_storage_order (mode
, temp
);
10247 tree array
= treeop0
;
10248 tree index
= treeop1
;
10251 /* Fold an expression like: "foo"[2].
10252 This is not done in fold so it won't happen inside &.
10253 Don't fold if this is for wide characters since it's too
10254 difficult to do correctly and this is a very rare case. */
10256 if (modifier
!= EXPAND_CONST_ADDRESS
10257 && modifier
!= EXPAND_INITIALIZER
10258 && modifier
!= EXPAND_MEMORY
)
10260 tree t
= fold_read_from_constant_string (exp
);
10263 return expand_expr (t
, target
, tmode
, modifier
);
10266 /* If this is a constant index into a constant array,
10267 just get the value from the array. Handle both the cases when
10268 we have an explicit constructor and when our operand is a variable
10269 that was declared const. */
10271 if (modifier
!= EXPAND_CONST_ADDRESS
10272 && modifier
!= EXPAND_INITIALIZER
10273 && modifier
!= EXPAND_MEMORY
10274 && TREE_CODE (array
) == CONSTRUCTOR
10275 && ! TREE_SIDE_EFFECTS (array
)
10276 && TREE_CODE (index
) == INTEGER_CST
)
10278 unsigned HOST_WIDE_INT ix
;
10281 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
10283 if (tree_int_cst_equal (field
, index
))
10285 if (!TREE_SIDE_EFFECTS (value
))
10286 return expand_expr (fold (value
), target
, tmode
, modifier
);
10291 else if (optimize
>= 1
10292 && modifier
!= EXPAND_CONST_ADDRESS
10293 && modifier
!= EXPAND_INITIALIZER
10294 && modifier
!= EXPAND_MEMORY
10295 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
10296 && TREE_CODE (index
) == INTEGER_CST
10297 && (VAR_P (array
) || TREE_CODE (array
) == CONST_DECL
)
10298 && (init
= ctor_for_folding (array
)) != error_mark_node
)
10300 if (init
== NULL_TREE
)
10302 tree value
= build_zero_cst (type
);
10303 if (TREE_CODE (value
) == CONSTRUCTOR
)
10305 /* If VALUE is a CONSTRUCTOR, this optimization is only
10306 useful if this doesn't store the CONSTRUCTOR into
10307 memory. If it does, it is more efficient to just
10308 load the data from the array directly. */
10309 rtx ret
= expand_constructor (value
, target
,
10311 if (ret
== NULL_RTX
)
10316 return expand_expr (value
, target
, tmode
, modifier
);
10318 else if (TREE_CODE (init
) == CONSTRUCTOR
)
10320 unsigned HOST_WIDE_INT ix
;
10323 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
10325 if (tree_int_cst_equal (field
, index
))
10327 if (TREE_SIDE_EFFECTS (value
))
10330 if (TREE_CODE (value
) == CONSTRUCTOR
)
10332 /* If VALUE is a CONSTRUCTOR, this
10333 optimization is only useful if
10334 this doesn't store the CONSTRUCTOR
10335 into memory. If it does, it is more
10336 efficient to just load the data from
10337 the array directly. */
10338 rtx ret
= expand_constructor (value
, target
,
10340 if (ret
== NULL_RTX
)
10345 expand_expr (fold (value
), target
, tmode
, modifier
);
10348 else if (TREE_CODE (init
) == STRING_CST
)
10350 tree low_bound
= array_ref_low_bound (exp
);
10351 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
10353 /* Optimize the special case of a zero lower bound.
10355 We convert the lower bound to sizetype to avoid problems
10356 with constant folding. E.g. suppose the lower bound is
10357 1 and its mode is QI. Without the conversion
10358 (ARRAY + (INDEX - (unsigned char)1))
10360 (ARRAY + (-(unsigned char)1) + INDEX)
10362 (ARRAY + 255 + INDEX). Oops! */
10363 if (!integer_zerop (low_bound
))
10364 index1
= size_diffop_loc (loc
, index1
,
10365 fold_convert_loc (loc
, sizetype
,
10368 if (tree_fits_uhwi_p (index1
)
10369 && compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
10371 tree type
= TREE_TYPE (TREE_TYPE (init
));
10372 scalar_int_mode mode
;
10374 if (is_int_mode (TYPE_MODE (type
), &mode
)
10375 && GET_MODE_SIZE (mode
) == 1)
10376 return gen_int_mode (TREE_STRING_POINTER (init
)
10377 [TREE_INT_CST_LOW (index1
)],
10383 goto normal_inner_ref
;
10385 case COMPONENT_REF
:
10386 /* If the operand is a CONSTRUCTOR, we can just extract the
10387 appropriate field if it is present. */
10388 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
10390 unsigned HOST_WIDE_INT idx
;
10392 scalar_int_mode field_mode
;
10394 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
10396 if (field
== treeop1
10397 /* We can normally use the value of the field in the
10398 CONSTRUCTOR. However, if this is a bitfield in
10399 an integral mode that we can fit in a HOST_WIDE_INT,
10400 we must mask only the number of bits in the bitfield,
10401 since this is done implicitly by the constructor. If
10402 the bitfield does not meet either of those conditions,
10403 we can't do this optimization. */
10404 && (! DECL_BIT_FIELD (field
)
10405 || (is_int_mode (DECL_MODE (field
), &field_mode
)
10406 && (GET_MODE_PRECISION (field_mode
)
10407 <= HOST_BITS_PER_WIDE_INT
))))
10409 if (DECL_BIT_FIELD (field
)
10410 && modifier
== EXPAND_STACK_PARM
)
10412 op0
= expand_expr (value
, target
, tmode
, modifier
);
10413 if (DECL_BIT_FIELD (field
))
10415 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10416 scalar_int_mode imode
10417 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field
));
10419 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10421 op1
= gen_int_mode ((HOST_WIDE_INT_1
<< bitsize
) - 1,
10423 op0
= expand_and (imode
, op0
, op1
, target
);
10427 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10429 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10431 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10439 goto normal_inner_ref
;
10441 case BIT_FIELD_REF
:
10442 case ARRAY_RANGE_REF
:
10445 machine_mode mode1
, mode2
;
10446 HOST_WIDE_INT bitsize
, bitpos
;
10448 int reversep
, volatilep
= 0, must_force_mem
;
10450 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
10451 &unsignedp
, &reversep
, &volatilep
);
10452 rtx orig_op0
, memloc
;
10453 bool clear_mem_expr
= false;
10455 /* If we got back the original object, something is wrong. Perhaps
10456 we are evaluating an expression too early. In any event, don't
10457 infinitely recurse. */
10458 gcc_assert (tem
!= exp
);
10460 /* If TEM's type is a union of variable size, pass TARGET to the inner
10461 computation, since it will need a temporary and TARGET is known
10462 to have to do. This occurs in unchecked conversion in Ada. */
10464 = expand_expr_real (tem
,
10465 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10466 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10467 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10469 && modifier
!= EXPAND_STACK_PARM
10470 ? target
: NULL_RTX
),
10472 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10475 /* If the field has a mode, we want to access it in the
10476 field's mode, not the computed mode.
10477 If a MEM has VOIDmode (external with incomplete type),
10478 use BLKmode for it instead. */
10481 if (mode1
!= VOIDmode
)
10482 op0
= adjust_address (op0
, mode1
, 0);
10483 else if (GET_MODE (op0
) == VOIDmode
)
10484 op0
= adjust_address (op0
, BLKmode
, 0);
10488 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10490 /* If we have either an offset, a BLKmode result, or a reference
10491 outside the underlying object, we must force it to memory.
10492 Such a case can occur in Ada if we have unchecked conversion
10493 of an expression from a scalar type to an aggregate type or
10494 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10495 passed a partially uninitialized object or a view-conversion
10496 to a larger size. */
10497 must_force_mem
= (offset
10498 || mode1
== BLKmode
10499 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10501 /* Handle CONCAT first. */
10502 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10505 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
))
10506 && COMPLEX_MODE_P (mode1
)
10507 && COMPLEX_MODE_P (GET_MODE (op0
))
10508 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1
))
10509 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0
)))))
10512 op0
= flip_storage_order (GET_MODE (op0
), op0
);
10513 if (mode1
!= GET_MODE (op0
))
10516 for (int i
= 0; i
< 2; i
++)
10518 rtx op
= read_complex_part (op0
, i
!= 0);
10519 if (GET_CODE (op
) == SUBREG
)
10520 op
= force_reg (GET_MODE (op
), op
);
10521 rtx temp
= gen_lowpart_common (GET_MODE_INNER (mode1
),
10527 if (!REG_P (op
) && !MEM_P (op
))
10528 op
= force_reg (GET_MODE (op
), op
);
10529 op
= gen_lowpart (GET_MODE_INNER (mode1
), op
);
10533 op0
= gen_rtx_CONCAT (mode1
, parts
[0], parts
[1]);
10538 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10541 op0
= XEXP (op0
, 0);
10542 mode2
= GET_MODE (op0
);
10544 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10545 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10549 op0
= XEXP (op0
, 1);
10551 mode2
= GET_MODE (op0
);
10554 /* Otherwise force into memory. */
10555 must_force_mem
= 1;
10558 /* If this is a constant, put it in a register if it is a legitimate
10559 constant and we don't need a memory reference. */
10560 if (CONSTANT_P (op0
)
10561 && mode2
!= BLKmode
10562 && targetm
.legitimate_constant_p (mode2
, op0
)
10563 && !must_force_mem
)
10564 op0
= force_reg (mode2
, op0
);
10566 /* Otherwise, if this is a constant, try to force it to the constant
10567 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10568 is a legitimate constant. */
10569 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10570 op0
= validize_mem (memloc
);
10572 /* Otherwise, if this is a constant or the object is not in memory
10573 and need be, put it there. */
10574 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10576 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10577 emit_move_insn (memloc
, op0
);
10579 clear_mem_expr
= true;
10584 machine_mode address_mode
;
10585 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
10588 gcc_assert (MEM_P (op0
));
10590 address_mode
= get_address_mode (op0
);
10591 if (GET_MODE (offset_rtx
) != address_mode
)
10593 /* We cannot be sure that the RTL in offset_rtx is valid outside
10594 of a memory address context, so force it into a register
10595 before attempting to convert it to the desired mode. */
10596 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
10597 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
10600 /* See the comment in expand_assignment for the rationale. */
10601 if (mode1
!= VOIDmode
10604 && (bitpos
% bitsize
) == 0
10605 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
10606 && MEM_ALIGN (op0
) >= GET_MODE_ALIGNMENT (mode1
))
10608 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10612 op0
= offset_address (op0
, offset_rtx
,
10613 highest_pow2_factor (offset
));
10616 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10617 record its alignment as BIGGEST_ALIGNMENT. */
10618 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
10619 && is_aligning_offset (offset
, tem
))
10620 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
10622 /* Don't forget about volatility even if this is a bitfield. */
10623 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
10625 if (op0
== orig_op0
)
10626 op0
= copy_rtx (op0
);
10628 MEM_VOLATILE_P (op0
) = 1;
10631 /* In cases where an aligned union has an unaligned object
10632 as a field, we might be extracting a BLKmode value from
10633 an integer-mode (e.g., SImode) object. Handle this case
10634 by doing the extract into an object as wide as the field
10635 (which we know to be the width of a basic mode), then
10636 storing into memory, and changing the mode to BLKmode. */
10637 if (mode1
== VOIDmode
10638 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
10639 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
10640 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
10641 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
10642 && modifier
!= EXPAND_CONST_ADDRESS
10643 && modifier
!= EXPAND_INITIALIZER
10644 && modifier
!= EXPAND_MEMORY
)
10645 /* If the bitfield is volatile and the bitsize
10646 is narrower than the access size of the bitfield,
10647 we need to extract bitfields from the access. */
10648 || (volatilep
&& TREE_CODE (exp
) == COMPONENT_REF
10649 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp
, 1))
10650 && mode1
!= BLKmode
10651 && bitsize
< GET_MODE_SIZE (mode1
) * BITS_PER_UNIT
)
10652 /* If the field isn't aligned enough to fetch as a memref,
10653 fetch it as a bit field. */
10654 || (mode1
!= BLKmode
10656 ? MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
10657 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0)
10658 : TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
10659 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
10660 && modifier
!= EXPAND_MEMORY
10661 && ((modifier
== EXPAND_CONST_ADDRESS
10662 || modifier
== EXPAND_INITIALIZER
)
10664 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
10665 || (bitpos
% BITS_PER_UNIT
!= 0)))
10666 /* If the type and the field are a constant size and the
10667 size of the type isn't the same size as the bitfield,
10668 we must use bitfield operations. */
10670 && TYPE_SIZE (TREE_TYPE (exp
))
10671 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
10672 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
10675 machine_mode ext_mode
= mode
;
10677 if (ext_mode
== BLKmode
10678 && ! (target
!= 0 && MEM_P (op0
)
10680 && bitpos
% BITS_PER_UNIT
== 0))
10681 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
10683 if (ext_mode
== BLKmode
)
10686 target
= assign_temp (type
, 1, 1);
10688 /* ??? Unlike the similar test a few lines below, this one is
10689 very likely obsolete. */
10693 /* In this case, BITPOS must start at a byte boundary and
10694 TARGET, if specified, must be a MEM. */
10695 gcc_assert (MEM_P (op0
)
10696 && (!target
|| MEM_P (target
))
10697 && !(bitpos
% BITS_PER_UNIT
));
10699 emit_block_move (target
,
10700 adjust_address (op0
, VOIDmode
,
10701 bitpos
/ BITS_PER_UNIT
),
10702 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
10704 (modifier
== EXPAND_STACK_PARM
10705 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
10710 /* If we have nothing to extract, the result will be 0 for targets
10711 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10712 return 0 for the sake of consistency, as reading a zero-sized
10713 bitfield is valid in Ada and the value is fully specified. */
10717 op0
= validize_mem (op0
);
10719 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
10720 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10722 /* If the result has a record type and the extraction is done in
10723 an integral mode, then the field may be not aligned on a byte
10724 boundary; in this case, if it has reverse storage order, it
10725 needs to be extracted as a scalar field with reverse storage
10726 order and put back into memory order afterwards. */
10727 if (TREE_CODE (type
) == RECORD_TYPE
10728 && GET_MODE_CLASS (ext_mode
) == MODE_INT
)
10729 reversep
= TYPE_REVERSE_STORAGE_ORDER (type
);
10731 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
10732 (modifier
== EXPAND_STACK_PARM
10733 ? NULL_RTX
: target
),
10734 ext_mode
, ext_mode
, reversep
, alt_rtl
);
10736 /* If the result has a record type and the mode of OP0 is an
10737 integral mode then, if BITSIZE is narrower than this mode
10738 and this is for big-endian data, we must put the field
10739 into the high-order bits. And we must also put it back
10740 into memory order if it has been previously reversed. */
10741 scalar_int_mode op0_mode
;
10742 if (TREE_CODE (type
) == RECORD_TYPE
10743 && is_int_mode (GET_MODE (op0
), &op0_mode
))
10745 HOST_WIDE_INT size
= GET_MODE_BITSIZE (op0_mode
);
10748 && reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
10749 op0
= expand_shift (LSHIFT_EXPR
, op0_mode
, op0
,
10750 size
- bitsize
, op0
, 1);
10753 op0
= flip_storage_order (op0_mode
, op0
);
10756 /* If the result type is BLKmode, store the data into a temporary
10757 of the appropriate type, but with the mode corresponding to the
10758 mode for the data we have (op0's mode). */
10759 if (mode
== BLKmode
)
10762 = assign_stack_temp_for_type (ext_mode
,
10763 GET_MODE_BITSIZE (ext_mode
),
10765 emit_move_insn (new_rtx
, op0
);
10766 op0
= copy_rtx (new_rtx
);
10767 PUT_MODE (op0
, BLKmode
);
10773 /* If the result is BLKmode, use that to access the object
10775 if (mode
== BLKmode
)
10778 /* Get a reference to just this component. */
10779 if (modifier
== EXPAND_CONST_ADDRESS
10780 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
10781 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10783 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10785 if (op0
== orig_op0
)
10786 op0
= copy_rtx (op0
);
10788 /* Don't set memory attributes if the base expression is
10789 SSA_NAME that got expanded as a MEM. In that case, we should
10790 just honor its original memory attributes. */
10791 if (TREE_CODE (tem
) != SSA_NAME
|| !MEM_P (orig_op0
))
10792 set_mem_attributes (op0
, exp
, 0);
10794 if (REG_P (XEXP (op0
, 0)))
10795 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10797 /* If op0 is a temporary because the original expressions was forced
10798 to memory, clear MEM_EXPR so that the original expression cannot
10799 be marked as addressable through MEM_EXPR of the temporary. */
10800 if (clear_mem_expr
)
10801 set_mem_expr (op0
, NULL_TREE
);
10803 MEM_VOLATILE_P (op0
) |= volatilep
;
10806 && modifier
!= EXPAND_MEMORY
10807 && modifier
!= EXPAND_WRITE
)
10808 op0
= flip_storage_order (mode1
, op0
);
10810 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
10811 || modifier
== EXPAND_CONST_ADDRESS
10812 || modifier
== EXPAND_INITIALIZER
)
10816 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
10818 convert_move (target
, op0
, unsignedp
);
10823 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
10826 /* All valid uses of __builtin_va_arg_pack () are removed during
10828 if (CALL_EXPR_VA_ARG_PACK (exp
))
10829 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
10831 tree fndecl
= get_callee_fndecl (exp
), attr
;
10834 && (attr
= lookup_attribute ("error",
10835 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10836 error ("%Kcall to %qs declared with attribute error: %s",
10837 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
10838 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10840 && (attr
= lookup_attribute ("warning",
10841 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10842 warning_at (tree_nonartificial_location (exp
),
10843 0, "%Kcall to %qs declared with attribute warning: %s",
10844 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
10845 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10847 /* Check for a built-in function. */
10848 if (fndecl
&& DECL_BUILT_IN (fndecl
))
10850 gcc_assert (DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_FRONTEND
);
10851 if (CALL_WITH_BOUNDS_P (exp
))
10852 return expand_builtin_with_bounds (exp
, target
, subtarget
,
10855 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
10858 return expand_call (exp
, target
, ignore
);
10860 case VIEW_CONVERT_EXPR
:
10863 /* If we are converting to BLKmode, try to avoid an intermediate
10864 temporary by fetching an inner memory reference. */
10865 if (mode
== BLKmode
10866 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
10867 && TYPE_MODE (TREE_TYPE (treeop0
)) != BLKmode
10868 && handled_component_p (treeop0
))
10870 machine_mode mode1
;
10871 HOST_WIDE_INT bitsize
, bitpos
;
10873 int unsignedp
, reversep
, volatilep
= 0;
10875 = get_inner_reference (treeop0
, &bitsize
, &bitpos
, &offset
, &mode1
,
10876 &unsignedp
, &reversep
, &volatilep
);
10879 /* ??? We should work harder and deal with non-zero offsets. */
10881 && (bitpos
% BITS_PER_UNIT
) == 0
10884 && compare_tree_int (TYPE_SIZE (type
), bitsize
) == 0)
10886 /* See the normal_inner_ref case for the rationale. */
10888 = expand_expr_real (tem
,
10889 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10890 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10892 && modifier
!= EXPAND_STACK_PARM
10893 ? target
: NULL_RTX
),
10895 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10898 if (MEM_P (orig_op0
))
10902 /* Get a reference to just this component. */
10903 if (modifier
== EXPAND_CONST_ADDRESS
10904 || modifier
== EXPAND_SUM
10905 || modifier
== EXPAND_INITIALIZER
)
10906 op0
= adjust_address_nv (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
10908 op0
= adjust_address (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
10910 if (op0
== orig_op0
)
10911 op0
= copy_rtx (op0
);
10913 set_mem_attributes (op0
, treeop0
, 0);
10914 if (REG_P (XEXP (op0
, 0)))
10915 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10917 MEM_VOLATILE_P (op0
) |= volatilep
;
10923 op0
= expand_expr_real (treeop0
, NULL_RTX
, VOIDmode
, modifier
,
10924 NULL
, inner_reference_p
);
10926 /* If the input and output modes are both the same, we are done. */
10927 if (mode
== GET_MODE (op0
))
10929 /* If neither mode is BLKmode, and both modes are the same size
10930 then we can use gen_lowpart. */
10931 else if (mode
!= BLKmode
&& GET_MODE (op0
) != BLKmode
10932 && (GET_MODE_PRECISION (mode
)
10933 == GET_MODE_PRECISION (GET_MODE (op0
)))
10934 && !COMPLEX_MODE_P (GET_MODE (op0
)))
10936 if (GET_CODE (op0
) == SUBREG
)
10937 op0
= force_reg (GET_MODE (op0
), op0
);
10938 temp
= gen_lowpart_common (mode
, op0
);
10943 if (!REG_P (op0
) && !MEM_P (op0
))
10944 op0
= force_reg (GET_MODE (op0
), op0
);
10945 op0
= gen_lowpart (mode
, op0
);
10948 /* If both types are integral, convert from one mode to the other. */
10949 else if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0
)))
10950 op0
= convert_modes (mode
, GET_MODE (op0
), op0
,
10951 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
10952 /* If the output type is a bit-field type, do an extraction. */
10953 else if (reduce_bit_field
)
10954 return extract_bit_field (op0
, TYPE_PRECISION (type
), 0,
10955 TYPE_UNSIGNED (type
), NULL_RTX
,
10956 mode
, mode
, false, NULL
);
10957 /* As a last resort, spill op0 to memory, and reload it in a
10959 else if (!MEM_P (op0
))
10961 /* If the operand is not a MEM, force it into memory. Since we
10962 are going to be changing the mode of the MEM, don't call
10963 force_const_mem for constants because we don't allow pool
10964 constants to change mode. */
10965 tree inner_type
= TREE_TYPE (treeop0
);
10967 gcc_assert (!TREE_ADDRESSABLE (exp
));
10969 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
10971 = assign_stack_temp_for_type
10972 (TYPE_MODE (inner_type
),
10973 GET_MODE_SIZE (TYPE_MODE (inner_type
)), inner_type
);
10975 emit_move_insn (target
, op0
);
10979 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10980 output type is such that the operand is known to be aligned, indicate
10981 that it is. Otherwise, we need only be concerned about alignment for
10982 non-BLKmode results. */
10985 enum insn_code icode
;
10987 if (modifier
!= EXPAND_WRITE
10988 && modifier
!= EXPAND_MEMORY
10989 && !inner_reference_p
10991 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode
))
10993 /* If the target does have special handling for unaligned
10994 loads of mode then use them. */
10995 if ((icode
= optab_handler (movmisalign_optab
, mode
))
10996 != CODE_FOR_nothing
)
11000 op0
= adjust_address (op0
, mode
, 0);
11001 /* We've already validated the memory, and we're creating a
11002 new pseudo destination. The predicates really can't
11004 reg
= gen_reg_rtx (mode
);
11006 /* Nor can the insn generator. */
11007 rtx_insn
*insn
= GEN_FCN (icode
) (reg
, op0
);
11011 else if (STRICT_ALIGNMENT
)
11013 tree inner_type
= TREE_TYPE (treeop0
);
11014 HOST_WIDE_INT temp_size
11015 = MAX (int_size_in_bytes (inner_type
),
11016 (HOST_WIDE_INT
) GET_MODE_SIZE (mode
));
11018 = assign_stack_temp_for_type (mode
, temp_size
, type
);
11019 rtx new_with_op0_mode
11020 = adjust_address (new_rtx
, GET_MODE (op0
), 0);
11022 gcc_assert (!TREE_ADDRESSABLE (exp
));
11024 if (GET_MODE (op0
) == BLKmode
)
11025 emit_block_move (new_with_op0_mode
, op0
,
11026 GEN_INT (GET_MODE_SIZE (mode
)),
11027 (modifier
== EXPAND_STACK_PARM
11028 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
11030 emit_move_insn (new_with_op0_mode
, op0
);
11036 op0
= adjust_address (op0
, mode
, 0);
11043 tree lhs
= treeop0
;
11044 tree rhs
= treeop1
;
11045 gcc_assert (ignore
);
11047 /* Check for |= or &= of a bitfield of size one into another bitfield
11048 of size 1. In this case, (unless we need the result of the
11049 assignment) we can do this more efficiently with a
11050 test followed by an assignment, if necessary.
11052 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11053 things change so we do, this code should be enhanced to
11055 if (TREE_CODE (lhs
) == COMPONENT_REF
11056 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
11057 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
11058 && TREE_OPERAND (rhs
, 0) == lhs
11059 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
11060 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
11061 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
11063 rtx_code_label
*label
= gen_label_rtx ();
11064 int value
= TREE_CODE (rhs
) == BIT_IOR_EXPR
;
11065 do_jump (TREE_OPERAND (rhs
, 1),
11068 profile_probability::uninitialized ());
11069 expand_assignment (lhs
, build_int_cst (TREE_TYPE (rhs
), value
),
11071 do_pending_stack_adjust ();
11072 emit_label (label
);
11076 expand_assignment (lhs
, rhs
, false);
11081 return expand_expr_addr_expr (exp
, target
, tmode
, modifier
);
11083 case REALPART_EXPR
:
11084 op0
= expand_normal (treeop0
);
11085 return read_complex_part (op0
, false);
11087 case IMAGPART_EXPR
:
11088 op0
= expand_normal (treeop0
);
11089 return read_complex_part (op0
, true);
11096 /* Expanded in cfgexpand.c. */
11097 gcc_unreachable ();
11099 case TRY_CATCH_EXPR
:
11101 case EH_FILTER_EXPR
:
11102 case TRY_FINALLY_EXPR
:
11103 /* Lowered by tree-eh.c. */
11104 gcc_unreachable ();
11106 case WITH_CLEANUP_EXPR
:
11107 case CLEANUP_POINT_EXPR
:
11109 case CASE_LABEL_EXPR
:
11114 case COMPOUND_EXPR
:
11115 case PREINCREMENT_EXPR
:
11116 case PREDECREMENT_EXPR
:
11117 case POSTINCREMENT_EXPR
:
11118 case POSTDECREMENT_EXPR
:
11121 case COMPOUND_LITERAL_EXPR
:
11122 /* Lowered by gimplify.c. */
11123 gcc_unreachable ();
11126 /* Function descriptors are not valid except for as
11127 initialization constants, and should not be expanded. */
11128 gcc_unreachable ();
11130 case WITH_SIZE_EXPR
:
11131 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11132 have pulled out the size to use in whatever context it needed. */
11133 return expand_expr_real (treeop0
, original_target
, tmode
,
11134 modifier
, alt_rtl
, inner_reference_p
);
11137 return expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
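
/* For example, reducing a value to a signed 3-bit field held in a 32-bit
   mode shifts left and then arithmetically right by 29, so 5 (binary 101)
   becomes -3; the unsigned path instead masks with 0x7 and leaves 5
   unchanged.  */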

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
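
/* The pattern recognized above arises from manual over-alignment such as
   (-(sizetype) &exp) & (N - 1), where N is a power of two and N - 1 exceeds
   BIGGEST_ALIGNMENT in bytes: adding that offset to the address of EXP
   rounds it up to the next N-byte boundary.  */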
11210 /* Return the tree node if an ARG corresponds to a string constant or zero
11211 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
11212 in bytes within the string that ARG is accessing. The type of the
11213 offset will be `sizetype'. */
11216 string_constant (tree arg
, tree
*ptr_offset
)
11218 tree array
, offset
, lower_bound
;
11221 if (TREE_CODE (arg
) == ADDR_EXPR
)
11223 if (TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
11225 *ptr_offset
= size_zero_node
;
11226 return TREE_OPERAND (arg
, 0);
11228 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == VAR_DECL
)
11230 array
= TREE_OPERAND (arg
, 0);
11231 offset
= size_zero_node
;
11233 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
11235 array
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
11236 offset
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 1);
11237 if (TREE_CODE (array
) != STRING_CST
&& !VAR_P (array
))
11240 /* Check if the array has a nonzero lower bound. */
11241 lower_bound
= array_ref_low_bound (TREE_OPERAND (arg
, 0));
11242 if (!integer_zerop (lower_bound
))
11244 /* If the offset and base aren't both constants, return 0. */
11245 if (TREE_CODE (lower_bound
) != INTEGER_CST
)
11247 if (TREE_CODE (offset
) != INTEGER_CST
)
11249 /* Adjust offset by the lower bound. */
11250 offset
= size_diffop (fold_convert (sizetype
, offset
),
11251 fold_convert (sizetype
, lower_bound
));
11254 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == MEM_REF
)
11256 array
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
11257 offset
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 1);
11258 if (TREE_CODE (array
) != ADDR_EXPR
)
11260 array
= TREE_OPERAND (array
, 0);
11261 if (TREE_CODE (array
) != STRING_CST
&& !VAR_P (array
))
11267 else if (TREE_CODE (arg
) == PLUS_EXPR
|| TREE_CODE (arg
) == POINTER_PLUS_EXPR
)
11269 tree arg0
= TREE_OPERAND (arg
, 0);
11270 tree arg1
= TREE_OPERAND (arg
, 1);
11275 if (TREE_CODE (arg0
) == ADDR_EXPR
11276 && (TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
11277 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == VAR_DECL
))
11279 array
= TREE_OPERAND (arg0
, 0);
11282 else if (TREE_CODE (arg1
) == ADDR_EXPR
11283 && (TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
11284 || TREE_CODE (TREE_OPERAND (arg1
, 0)) == VAR_DECL
))
11286 array
= TREE_OPERAND (arg1
, 0);
11295 if (TREE_CODE (array
) == STRING_CST
)
11297 *ptr_offset
= fold_convert (sizetype
, offset
);
11300 else if (VAR_P (array
) || TREE_CODE (array
) == CONST_DECL
)
11303 tree init
= ctor_for_folding (array
);
11305 /* Variables initialized to string literals can be handled too. */
11306 if (init
== error_mark_node
11308 || TREE_CODE (init
) != STRING_CST
)
11311 /* Avoid const char foo[4] = "abcde"; */
11312 if (DECL_SIZE_UNIT (array
) == NULL_TREE
11313 || TREE_CODE (DECL_SIZE_UNIT (array
)) != INTEGER_CST
11314 || (length
= TREE_STRING_LENGTH (init
)) <= 0
11315 || compare_tree_int (DECL_SIZE_UNIT (array
), length
) < 0)
11318 /* If variable is bigger than the string literal, OFFSET must be constant
11319 and inside of the bounds of the string literal. */
11320 offset
= fold_convert (sizetype
, offset
);
11321 if (compare_tree_int (DECL_SIZE_UNIT (array
), length
) > 0
11322 && (! tree_fits_uhwi_p (offset
)
11323 || compare_tree_int (offset
, length
) >= 0))
11326 *ptr_offset
= offset
;

/* Generate code to calculate the exploded comparison expression OPS
   using a store-flag instruction and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
11351 do_store_flag (sepops ops
, rtx target
, machine_mode mode
)
11353 enum rtx_code code
;
11354 tree arg0
, arg1
, type
;
11355 machine_mode operand_mode
;
11358 rtx subtarget
= target
;
11359 location_t loc
= ops
->location
;
11364 /* Don't crash if the comparison was erroneous. */
11365 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
11368 type
= TREE_TYPE (arg0
);
11369 operand_mode
= TYPE_MODE (type
);
11370 unsignedp
= TYPE_UNSIGNED (type
);
11372 /* We won't bother with BLKmode store-flag operations because it would mean
11373 passing a lot of information to emit_store_flag. */
11374 if (operand_mode
== BLKmode
)
11377 /* We won't bother with store-flag operations involving function pointers
11378 when function pointers must be canonicalized before comparisons. */
11379 if (targetm
.have_canonicalize_funcptr_for_compare ()
11380 && ((TREE_CODE (TREE_TYPE (arg0
)) == POINTER_TYPE
11381 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
)))
11383 || (TREE_CODE (TREE_TYPE (arg1
)) == POINTER_TYPE
11384 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
)))
11385 == FUNCTION_TYPE
))))
11391 /* For vector typed comparisons emit code to generate the desired
11392 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11393 expander for this. */
11394 if (TREE_CODE (ops
->type
) == VECTOR_TYPE
)
11396 tree ifexp
= build2 (ops
->code
, ops
->type
, arg0
, arg1
);
11397 if (VECTOR_BOOLEAN_TYPE_P (ops
->type
)
11398 && expand_vec_cmp_expr_p (TREE_TYPE (arg0
), ops
->type
, ops
->code
))
11399 return expand_vec_cmp_expr (ops
->type
, ifexp
, target
);
11402 tree if_true
= constant_boolean_node (true, ops
->type
);
11403 tree if_false
= constant_boolean_node (false, ops
->type
);
11404 return expand_vec_cond_expr (ops
->type
, ifexp
, if_true
,
11409 /* Get the rtx comparison code to use. We know that EXP is a comparison
11410 operation of some type. Some comparisons against 1 and -1 can be
11411 converted to comparisons with zero. Do so here so that the tests
11412 below will be aware that we have a comparison with zero. These
11413 tests will not catch constants in the first operand, but constants
11414 are rarely passed as the first operand. */
11425 if (integer_onep (arg1
))
11426 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
11428 code
= unsignedp
? LTU
: LT
;
11431 if (! unsignedp
&& integer_all_onesp (arg1
))
11432 arg1
= integer_zero_node
, code
= LT
;
11434 code
= unsignedp
? LEU
: LE
;
11437 if (! unsignedp
&& integer_all_onesp (arg1
))
11438 arg1
= integer_zero_node
, code
= GE
;
11440 code
= unsignedp
? GTU
: GT
;
11443 if (integer_onep (arg1
))
11444 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
11446 code
= unsignedp
? GEU
: GE
;
11449 case UNORDERED_EXPR
:
11475 gcc_unreachable ();
11478 /* Put a constant second. */
11479 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
11480 || TREE_CODE (arg0
) == FIXED_CST
)
11482 std::swap (arg0
, arg1
);
11483 code
= swap_condition (code
);
11486 /* If this is an equality or inequality test of a single bit, we can
11487 do this by shifting the bit being tested to the low-order bit and
11488 masking the result with the constant 1. If the condition was EQ,
11489 we xor it with 1. This does not require an scc insn and is faster
11490 than an scc insn even if we have it.
11492 The code to make this transformation was moved into fold_single_bit_test,
11493 so we just call into the folder and expand its result. */
11495 if ((code
== NE
|| code
== EQ
)
11496 && integer_zerop (arg1
)
11497 && (TYPE_PRECISION (ops
->type
) != 1 || TYPE_UNSIGNED (ops
->type
)))
11499 gimple
*srcstmt
= get_def_for_expr (arg0
, BIT_AND_EXPR
);
11501 && integer_pow2p (gimple_assign_rhs2 (srcstmt
)))
11503 enum tree_code tcode
= code
== NE
? NE_EXPR
: EQ_EXPR
;
11504 tree type
= lang_hooks
.types
.type_for_mode (mode
, unsignedp
);
11505 tree temp
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg1
),
11506 gimple_assign_rhs1 (srcstmt
),
11507 gimple_assign_rhs2 (srcstmt
));
11508 temp
= fold_single_bit_test (loc
, tcode
, temp
, arg1
, type
);
11510 return expand_expr (temp
, target
, VOIDmode
, EXPAND_NORMAL
);
11514 if (! get_subtarget (target
)
11515 || GET_MODE (subtarget
) != operand_mode
)
11518 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
11521 target
= gen_reg_rtx (mode
);
11523 /* Try a cstore if possible. */
11524 return emit_store_flag_force (target
, code
, op0
, op1
,
11525 operand_mode
, unsignedp
,
11526 (TYPE_PRECISION (ops
->type
) == 1
11527 && !TYPE_UNSIGNED (ops
->type
)) ? -1 : 1);
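
/* As an example of the single-bit case handled above, (x & 8) != 0 is
   rewritten by fold_single_bit_test into ((x >> 3) & 1), so no store-flag
   (scc) instruction is needed for it.  */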

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  struct expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
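
/* For a switch whose case labels span 5 ... 12, MINVAL is 5 and RANGE is 7;
   the casesi pattern then receives the SImode index, those two bounds, the
   dispatch table label, and the out-of-range label.  */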

/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
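
/* The address computed above is TABLE_LABEL + INDEX * GET_MODE_SIZE
   (CASE_VECTOR_MODE); e.g. with 4-byte case-vector entries, an index of 3
   loads the dispatch target from TABLE_LABEL + 12.  */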

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	RTVEC_ELT (v, i) = CONST0_RTX (inner);
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
      else
	gcc_unreachable ();
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;
  unsigned i;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
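
/* For instance, the 4-element boolean vector { -1, 0, -1, -1 } yields the
   scalar mask 0b1101: bit X is set exactly when element X is nonzero.  */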

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
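
/* For example, with DWARF2 unwinding the C++ front end ("gxx") gets
   __gxx_personality_v0, while setjmp/longjmp unwinding yields
   __gxx_personality_sj0.  */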

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
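
/* For a fixed-size object such as an array of ten 4-byte ints, expr_size
   expands to (const_int 40) and int_expr_size returns 40; for a
   variable-length array the size is not a compile-time constant, so
   int_expr_size returns -1.  */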