10f307d9 1/* Convert tree expression to rtl instructions, for GNU compiler.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
10f307d9 3
f12b58b3 4This file is part of GCC.
10f307d9 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
10f307d9 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
10f307d9 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
10f307d9 19
10f307d9 20#include "config.h"
405711de 21#include "system.h"
805e22b2 22#include "coretypes.h"
23#include "tm.h"
10f307d9 24#include "rtl.h"
b20a8bb4 25#include "alias.h"
26#include "symtab.h"
10f307d9 27#include "tree.h"
b20a8bb4 28#include "fold-const.h"
9ed99284 29#include "stringpool.h"
30#include "stor-layout.h"
31#include "attribs.h"
32#include "varasm.h"
10f307d9 33#include "flags.h"
09994a52 34#include "regs.h"
261db321 35#include "hard-reg-set.h"
037a5228 36#include "except.h"
10f307d9 37#include "function.h"
10f307d9 38#include "insn-config.h"
3084721c 39#include "insn-attr.h"
d53441c8 40#include "expmed.h"
41#include "dojump.h"
42#include "explow.h"
43#include "calls.h"
44#include "emit-rtl.h"
45#include "stmt.h"
fa56dc1d 46/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
cd03a192 47#include "expr.h"
34517c64 48#include "insn-codes.h"
d8fc4d0b 49#include "optabs.h"
50#include "libfuncs.h"
10f307d9 51#include "recog.h"
6702c250 52#include "reload.h"
10f307d9 53#include "typeclass.h"
12874aaf 54#include "toplev.h"
b3187c7c 55#include "langhooks.h"
a3c49299 56#include "intl.h"
075136a2 57#include "tm_p.h"
4ee9c684 58#include "tree-iterator.h"
94ea8568 59#include "predict.h"
60#include "dominance.h"
61#include "cfg.h"
bc61cadb 62#include "basic-block.h"
63#include "tree-ssa-alias.h"
64#include "internal-fn.h"
65#include "gimple-expr.h"
073c1fd5 66#include "gimple.h"
67#include "gimple-ssa.h"
1140c305 68#include "plugin-api.h"
69#include "ipa-ref.h"
073c1fd5 70#include "cgraph.h"
71#include "tree-ssanames.h"
2c8ff1ed 72#include "target.h"
218e3e4e 73#include "common/common-target.h"
5290ebdb 74#include "timevar.h"
3072d30e 75#include "df.h"
cb7f680b 76#include "diagnostic.h"
b23fb4cb 77#include "tree-ssa-live.h"
f7373a91 78#include "tree-outof-ssa.h"
db5ca0ab 79#include "target-globals.h"
4bb60ec7 80#include "params.h"
424a4a92 81#include "tree-ssa-address.h"
e797f49f 82#include "cfgexpand.h"
f7715905 83#include "builtins.h"
058a1b7a 84#include "tree-chkp.h"
85#include "rtl-chkp.h"
9336ad57 86#include "ccmp.h"
10f307d9 87
c0bfc78e 88
10f307d9 89/* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95int cse_not_expected;
96
3ebd94bd 97/* This structure is used by move_by_pieces to describe the move to
98 be performed. */
584511c1 99struct move_by_pieces_d
3ebd94bd 100{
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
f7c44134 109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
3ebd94bd 111 int reverse;
112};
113
6840589f 114/* This structure is used by store_by_pieces to describe the store to
dbd14dc5 115 be performed. */
116
584511c1 117struct store_by_pieces_d
dbd14dc5 118{
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
f7c44134 123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
3754d046 125 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
b9a7cc69 126 void *constfundata;
dbd14dc5 127 int reverse;
128};
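
/* As an illustration of the callback above: CONSTFUN is handed CONSTFUNDATA,
   the byte offset into the destination, and the mode being stored, and must
   return an rtx holding the constant bytes for that piece.  A minimal
   clearing callback (compare clear_by_pieces_1 below) might look like:

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

   The name zero_constfun is only for illustration here.  */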
129
3d953cb1 130static void move_by_pieces_1 (insn_gen_fn, machine_mode,
584511c1 131 struct move_by_pieces_d *);
35cb5232 132static bool block_move_libcall_safe_for_call_parm (void);
36d63243 133static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
9db0f34d 134 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
135 unsigned HOST_WIDE_INT);
35cb5232 136static tree emit_block_move_libcall_fn (int);
137static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
3754d046 138static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
35cb5232 139static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
584511c1 140static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
3d953cb1 141static void store_by_pieces_2 (insn_gen_fn, machine_mode,
584511c1 142 struct store_by_pieces_d *);
35cb5232 143static tree clear_storage_libcall_fn (int);
c81fd430 144static rtx_insn *compress_float_constant (rtx, rtx);
35cb5232 145static rtx get_subtarget (rtx);
35cb5232 146static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
3754d046 147 HOST_WIDE_INT, machine_mode,
f955ca51 148 tree, int, alias_set_type);
35cb5232 149static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
4bb60ec7 150static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
151 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
3754d046 152 machine_mode, tree, alias_set_type, bool);
35cb5232 153
b7bf20db 154static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
35cb5232 155
1f1872fd 156static int is_aligning_offset (const_tree, const_tree);
4f7f7efd 157static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
3754d046 158static rtx do_store_flag (sepops, rtx, machine_mode);
fad4a30c 159#ifdef PUSH_ROUNDING
3754d046 160static void emit_single_push_insn (machine_mode, rtx, tree);
fad4a30c 161#endif
3754d046 162static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
35cb5232 163static rtx const_vector_from_tree (tree);
d53441c8 164static tree tree_expr_size (const_tree);
165static HOST_WIDE_INT int_expr_size (tree);
10f307d9 166
10f307d9 167\f
6d8b68a3 168/* This is run to set up which modes can be used
169 directly in memory and to initialize the block move optab. It is run
170 at the beginning of compilation and when the target is reinitialized. */
07edfa02 171
172void
6d8b68a3 173init_expr_target (void)
07edfa02 174{
175 rtx insn, pat;
3754d046 176 machine_mode mode;
6fa98783 177 int num_clobbers;
9e042f31 178 rtx mem, mem1;
0c7f5242 179 rtx reg;
9e042f31 180
a97fcedd 181 /* Try indexing by frame ptr and try by stack ptr.
182 It is known that on the Convex the stack ptr isn't a valid index.
183 With luck, one or the other is valid on any machine. */
06a78ffe 184 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
185 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
07edfa02 186
0c7f5242 187 /* A scratch register we can modify in-place below to avoid
188 useless RTL allocations. */
dcd6d0f4 189 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
0c7f5242 190
7a5749cc 191 insn = rtx_alloc (INSN);
d1f9b275 192 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
7a5749cc 193 PATTERN (insn) = pat;
07edfa02 194
195 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
3754d046 196 mode = (machine_mode) ((int) mode + 1))
07edfa02 197 {
198 int regno;
07edfa02 199
200 direct_load[(int) mode] = direct_store[(int) mode] = 0;
201 PUT_MODE (mem, mode);
a97fcedd 202 PUT_MODE (mem1, mode);
07edfa02 203
3c209fda 204 /* See if there is some register that can be used in this mode and
205 directly loaded or stored from memory. */
206
b63679d2 207 if (mode != VOIDmode && mode != BLKmode)
208 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
209 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
210 regno++)
211 {
212 if (! HARD_REGNO_MODE_OK (regno, mode))
213 continue;
3c209fda 214
937ca48e 215 set_mode_and_regno (reg, mode, regno);
3c209fda 216
b63679d2 217 SET_SRC (pat) = mem;
218 SET_DEST (pat) = reg;
219 if (recog (pat, insn, &num_clobbers) >= 0)
220 direct_load[(int) mode] = 1;
3c209fda 221
a97fcedd 222 SET_SRC (pat) = mem1;
223 SET_DEST (pat) = reg;
224 if (recog (pat, insn, &num_clobbers) >= 0)
225 direct_load[(int) mode] = 1;
226
b63679d2 227 SET_SRC (pat) = reg;
228 SET_DEST (pat) = mem;
229 if (recog (pat, insn, &num_clobbers) >= 0)
230 direct_store[(int) mode] = 1;
a97fcedd 231
232 SET_SRC (pat) = reg;
233 SET_DEST (pat) = mem1;
234 if (recog (pat, insn, &num_clobbers) >= 0)
235 direct_store[(int) mode] = 1;
b63679d2 236 }
07edfa02 237 }
238
dcd6d0f4 239 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
c0c4a46d 240
241 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
242 mode = GET_MODE_WIDER_MODE (mode))
243 {
3754d046 244 machine_mode srcmode;
c0c4a46d 245 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
ff385626 246 srcmode = GET_MODE_WIDER_MODE (srcmode))
c0c4a46d 247 {
248 enum insn_code ic;
249
250 ic = can_extend_p (mode, srcmode, 0);
251 if (ic == CODE_FOR_nothing)
252 continue;
253
254 PUT_MODE (mem, srcmode);
ff385626 255
39c56a89 256 if (insn_operand_matches (ic, 1, mem))
c0c4a46d 257 float_extend_from_mem[mode][srcmode] = true;
258 }
259 }
07edfa02 260}
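
/* A rough sketch of what the tables filled in above record: after this runs,
   direct_load[(int) SImode] is nonzero if some hard register can be loaded
   from memory in SImode by a single recognized move insn, and
   direct_store[(int) SImode] likewise for stores; float_extend_from_mem is
   indexed [wider mode][narrower mode] and records whether the extension
   insn's source operand may be a MEM.  Code such as convert_modes below
   consults these tables, roughly as in:

     if (MEM_P (x) && ! MEM_VOLATILE_P (x) && direct_load[(int) mode])
       return gen_lowpart (mode, x);
*/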
6fa98783 261
10f307d9 262/* This is run at the start of compiling a function. */
263
264void
35cb5232 265init_expr (void)
10f307d9 266{
fd6ffb7c 267 memset (&crtl->expr, 0, sizeof (crtl->expr));
10f307d9 268}
10f307d9 269\f
270/* Copy data from FROM to TO, where the machine modes are not the same.
68a556d6 271 Both modes may be integer, or both may be floating, or both may be
272 fixed-point.
10f307d9 273 UNSIGNEDP should be nonzero if FROM is an unsigned type.
274 This causes zero-extension instead of sign-extension. */
275
276void
35cb5232 277convert_move (rtx to, rtx from, int unsignedp)
10f307d9 278{
3754d046 279 machine_mode to_mode = GET_MODE (to);
280 machine_mode from_mode = GET_MODE (from);
cee7491d 281 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
282 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
10f307d9 283 enum insn_code code;
284 rtx libcall;
285
286 /* rtx code for making an equivalent value. */
65923445 287 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
288 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
10f307d9 289
10f307d9 290
611234b4 291 gcc_assert (to_real == from_real);
1f8b6002 292 gcc_assert (to_mode != BLKmode);
293 gcc_assert (from_mode != BLKmode);
10f307d9 294
4ee9c684 295 /* If the source and destination are already the same, then there's
296 nothing to do. */
297 if (to == from)
298 return;
299
acfb31e5 300 /* If FROM is a SUBREG that indicates that we have already done at least
301 the required extension, strip it. We don't handle such SUBREGs as
302 TO here. */
303
304 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
995b44f5 305 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
306 >= GET_MODE_PRECISION (to_mode))
e8629f9e 307 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
acfb31e5 308 from = gen_lowpart (to_mode, from), from_mode = to_mode;
309
611234b4 310 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
acfb31e5 311
10f307d9 312 if (to_mode == from_mode
313 || (from_mode == VOIDmode && CONSTANT_P (from)))
314 {
315 emit_move_insn (to, from);
316 return;
317 }
318
8a95ab85 319 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
320 {
611234b4 321 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
fa56dc1d 322
8a95ab85 323 if (VECTOR_MODE_P (to_mode))
1c0d4c2c 324 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
8a95ab85 325 else
1c0d4c2c 326 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
8a95ab85 327
328 emit_move_insn (to, from);
329 return;
330 }
331
a9f93c81 332 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
333 {
334 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
335 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
336 return;
337 }
338
10f307d9 339 if (to_real)
340 {
1d277a67 341 rtx value;
342 rtx_insn *insns;
a7cc195f 343 convert_optab tab;
ece3ba9a 344
069b07bf 345 gcc_assert ((GET_MODE_PRECISION (from_mode)
346 != GET_MODE_PRECISION (to_mode))
347 || (DECIMAL_FLOAT_MODE_P (from_mode)
348 != DECIMAL_FLOAT_MODE_P (to_mode)));
1f8b6002 349
069b07bf 350 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
351 /* Conversion between decimal float and binary float, same size. */
352 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
353 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
a7cc195f 354 tab = sext_optab;
a7cc195f 355 else
611234b4 356 tab = trunc_optab;
dd8a4c60 357
a7cc195f 358 /* Try converting directly if the insn is supported. */
dd8a4c60 359
d6bf3b14 360 code = convert_optab_handler (tab, to_mode, from_mode);
a7cc195f 361 if (code != CODE_FOR_nothing)
c2a91a88 362 {
a7cc195f 363 emit_unop_insn (code, to, from,
364 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
c2a91a88 365 return;
366 }
c2a91a88 367
a7cc195f 368 /* Otherwise use a libcall. */
f36b9f69 369 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
fa56dc1d 370
611234b4 371 /* Is this conversion implemented yet? */
372 gcc_assert (libcall);
10f307d9 373
542baf17 374 start_sequence ();
2c5d421b 375 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
ece3ba9a 376 1, from, from_mode);
542baf17 377 insns = get_insns ();
378 end_sequence ();
1d5ca076 379 emit_libcall_block (insns, to, value,
380 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
381 from)
382 : gen_rtx_FLOAT_EXTEND (to_mode, from));
10f307d9 383 return;
384 }
385
a7cc195f 386 /* Handle pointer conversion. */ /* SPEE 900220. */
91ae0791 387 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
388 {
389 convert_optab ctab;
390
391 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
392 ctab = trunc_optab;
393 else if (unsignedp)
394 ctab = zext_optab;
395 else
396 ctab = sext_optab;
397
398 if (convert_optab_handler (ctab, to_mode, from_mode)
399 != CODE_FOR_nothing)
400 {
401 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
402 to, from, UNKNOWN);
403 return;
404 }
405 }
406
a7cc195f 407 /* Targets are expected to provide conversion insns between PxImode and
408 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
409 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
410 {
3754d046 411 machine_mode full_mode
a7cc195f 412 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
413
d6bf3b14 414 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
611234b4 415 != CODE_FOR_nothing);
a7cc195f 416
417 if (full_mode != from_mode)
418 from = convert_to_mode (full_mode, from, unsignedp);
d6bf3b14 419 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
a7cc195f 420 to, from, UNKNOWN);
421 return;
422 }
423 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
424 {
06633980 425 rtx new_from;
3754d046 426 machine_mode full_mode
a7cc195f 427 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
c8076084 428 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
429 enum insn_code icode;
a7cc195f 430
c8076084 431 icode = convert_optab_handler (ctab, full_mode, from_mode);
432 gcc_assert (icode != CODE_FOR_nothing);
a7cc195f 433
a7cc195f 434 if (to_mode == full_mode)
06633980 435 {
c8076084 436 emit_unop_insn (icode, to, from, UNKNOWN);
06633980 437 return;
438 }
439
440 new_from = gen_reg_rtx (full_mode);
c8076084 441 emit_unop_insn (icode, new_from, from, UNKNOWN);
a7cc195f 442
aab2cf92 443 /* else proceed to integer conversions below. */
a7cc195f 444 from_mode = full_mode;
06633980 445 from = new_from;
a7cc195f 446 }
447
68a556d6 448 /* Make sure both are fixed-point modes or both are not. */
449 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
450 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
451 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
452 {
453 /* If we widen from_mode to to_mode and they are in the same class,
454 we won't saturate the result.
455 Otherwise, always saturate the result to play safe. */
456 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
457 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
458 expand_fixed_convert (to, from, 0, 0);
459 else
460 expand_fixed_convert (to, from, 0, 1);
461 return;
462 }
463
10f307d9 464 /* Now both modes are integers. */
465
466 /* Handle expanding beyond a word. */
995b44f5 467 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
468 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
10f307d9 469 {
1d277a67 470 rtx_insn *insns;
10f307d9 471 rtx lowpart;
472 rtx fill_value;
473 rtx lowfrom;
474 int i;
3754d046 475 machine_mode lowpart_mode;
10f307d9 476 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
477
478 /* Try converting directly if the insn is supported. */
479 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
480 != CODE_FOR_nothing)
481 {
6a0b5011 482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize > 0 && GET_CODE (from) == SUBREG)
487 from = force_reg (from_mode, from);
10f307d9 488 emit_unop_insn (code, to, from, equiv_code);
489 return;
490 }
491 /* Next, try converting via full word. */
995b44f5 492 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
10f307d9 493 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
494 != CODE_FOR_nothing))
495 {
8ae2fa3b 496 rtx word_to = gen_reg_rtx (word_mode);
8ad4c111 497 if (REG_P (to))
d6af6bc2 498 {
499 if (reg_overlap_mentioned_p (to, from))
500 from = force_reg (from_mode, from);
18b42941 501 emit_clobber (to);
d6af6bc2 502 }
8ae2fa3b 503 convert_move (word_to, from, unsignedp);
504 emit_unop_insn (code, to, word_to, equiv_code);
10f307d9 505 return;
506 }
507
508 /* No special multiword conversion insn; do it by hand. */
509 start_sequence ();
510
c43fbd61 511 /* Since we will turn this into a no conflict block, we must ensure that
512 the source does not overlap the target, so force it into an isolated
513 register when it might. Likewise for any MEM input, since the
514 conversion sequence might require several references to it and we
515 must ensure we're getting the same value every time. */
ab72e117 516
c43fbd61 517 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
ab72e117 518 from = force_reg (from_mode, from);
519
10f307d9 520 /* Get a copy of FROM widened to a word, if necessary. */
995b44f5 521 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
10f307d9 522 lowpart_mode = word_mode;
523 else
524 lowpart_mode = from_mode;
525
526 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
527
528 lowpart = gen_lowpart (lowpart_mode, to);
529 emit_move_insn (lowpart, lowfrom);
530
531 /* Compute the value to put in each remaining word. */
532 if (unsignedp)
533 fill_value = const0_rtx;
534 else
155586ea 535 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
536 LT, lowfrom, const0_rtx,
537 lowpart_mode, 0, -1);
10f307d9 538
539 /* Fill the remaining words. */
540 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
541 {
542 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
543 rtx subword = operand_subword (to, index, 1, to_mode);
544
611234b4 545 gcc_assert (subword);
10f307d9 546
547 if (fill_value != subword)
548 emit_move_insn (subword, fill_value);
549 }
550
551 insns = get_insns ();
552 end_sequence ();
553
e29831db 554 emit_insn (insns);
10f307d9 555 return;
556 }
557
5602c36d 558 /* Truncating multi-word to a word or less. */
995b44f5 559 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
560 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
10f307d9 561 {
e16ceb8e 562 if (!((MEM_P (from)
d5601bb1 563 && ! MEM_VOLATILE_P (from)
564 && direct_load[(int) to_mode]
4e27ffd0 565 && ! mode_dependent_address_p (XEXP (from, 0),
566 MEM_ADDR_SPACE (from)))
8ad4c111 567 || REG_P (from)
d5601bb1 568 || GET_CODE (from) == SUBREG))
569 from = force_reg (from_mode, from);
10f307d9 570 convert_move (to, gen_lowpart (word_mode, from), 0);
571 return;
572 }
573
10f307d9 574 /* Now follow all the conversions between integers
575 no more than a word long. */
576
577 /* For truncation, usually we can just refer to FROM in a narrower mode. */
578 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
396f2130 579 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
10f307d9 580 {
e16ceb8e 581 if (!((MEM_P (from)
5602c36d 582 && ! MEM_VOLATILE_P (from)
583 && direct_load[(int) to_mode]
4e27ffd0 584 && ! mode_dependent_address_p (XEXP (from, 0),
585 MEM_ADDR_SPACE (from)))
8ad4c111 586 || REG_P (from)
5602c36d 587 || GET_CODE (from) == SUBREG))
588 from = force_reg (from_mode, from);
8ad4c111 589 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
7de79a05 590 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
591 from = copy_to_reg (from);
10f307d9 592 emit_move_insn (to, gen_lowpart (to_mode, from));
593 return;
594 }
595
5602c36d 596 /* Handle extension. */
995b44f5 597 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
10f307d9 598 {
599 /* Convert directly if that works. */
600 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
601 != CODE_FOR_nothing)
602 {
603 emit_unop_insn (code, to, from, equiv_code);
604 return;
605 }
606 else
607 {
3754d046 608 machine_mode intermediate;
851e6849 609 rtx tmp;
f5ff0b21 610 int shift_amount;
10f307d9 611
612 /* Search for a mode to convert via. */
613 for (intermediate = from_mode; intermediate != VOIDmode;
614 intermediate = GET_MODE_WIDER_MODE (intermediate))
0f22a35c 615 if (((can_extend_p (to_mode, intermediate, unsignedp)
616 != CODE_FOR_nothing)
617 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
396f2130 618 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
10f307d9 619 && (can_extend_p (intermediate, from_mode, unsignedp)
620 != CODE_FOR_nothing))
621 {
622 convert_move (to, convert_to_mode (intermediate, from,
623 unsignedp), unsignedp);
624 return;
625 }
626
851e6849 627 /* No suitable intermediate mode.
fa56dc1d 628 Generate what we need with shifts. */
995b44f5 629 shift_amount = (GET_MODE_PRECISION (to_mode)
630 - GET_MODE_PRECISION (from_mode));
851e6849 631 from = gen_lowpart (to_mode, force_reg (from_mode, from));
632 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
633 to, unsignedp);
fa56dc1d 634 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
851e6849 635 to, unsignedp);
636 if (tmp != to)
637 emit_move_insn (to, tmp);
638 return;
10f307d9 639 }
640 }
641
fa56dc1d 642 /* Support special truncate insns for certain modes. */
d6bf3b14 643 if (convert_optab_handler (trunc_optab, to_mode,
644 from_mode) != CODE_FOR_nothing)
10f307d9 645 {
d6bf3b14 646 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
a7cc195f 647 to, from, UNKNOWN);
cd0fdd24 648 return;
649 }
650
10f307d9 651 /* Handle truncation of volatile memrefs, and so on;
652 the things that couldn't be truncated directly,
a7cc195f 653 and for which there was no special instruction.
654
655 ??? Code above formerly short-circuited this, for most integer
656 mode pairs, with a force_reg in from_mode followed by a recursive
657 call to this routine. Appears always to have been wrong. */
995b44f5 658 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
10f307d9 659 {
660 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
661 emit_move_insn (to, temp);
662 return;
663 }
664
665 /* Mode combination is not recognized. */
611234b4 666 gcc_unreachable ();
10f307d9 667}
668
669/* Return an rtx for a value that would result
670 from converting X to mode MODE.
671 Both X and MODE may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
673 This can be done by referring to a part of X in place
0a534ba7 674 or by copying to a new temporary with conversion. */
10f307d9 675
676rtx
3754d046 677convert_to_mode (machine_mode mode, rtx x, int unsignedp)
a63e1c46 678{
679 return convert_modes (mode, VOIDmode, x, unsignedp);
680}
681
682/* Return an rtx for a value that would result
683 from converting X from mode OLDMODE to mode MODE.
684 Both modes may be floating, or both integer.
685 UNSIGNEDP is nonzero if X is an unsigned value.
686
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion.
689
0a534ba7 690 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
a63e1c46 691
692rtx
3754d046 693convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
10f307d9 694{
19cb6b50 695 rtx temp;
a63e1c46 696
acfb31e5 697 /* If FROM is a SUBREG that indicates that we have already done at least
698 the required extension, strip it. */
699
700 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
701 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
e8629f9e 702 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
9f484185 703 x = gen_lowpart (mode, SUBREG_REG (x));
10f307d9 704
e4d9bbd7 705 if (GET_MODE (x) != VOIDmode)
706 oldmode = GET_MODE (x);
fa56dc1d 707
79a80d86 708 if (mode == oldmode)
709 return x;
710
c4050ce7 711 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
e92ac421 712 {
c4050ce7 713 /* If the caller did not tell us the old mode, then there is not
714 much to do with respect to canonicalization. We have to
715 assume that all the bits are significant. */
2508ba32 716 if (GET_MODE_CLASS (oldmode) != MODE_INT)
720f3e8a 717 oldmode = MAX_MODE_INT;
718 wide_int w = wide_int::from (std::make_pair (x, oldmode),
719 GET_MODE_PRECISION (mode),
720 unsignedp ? UNSIGNED : SIGNED);
a23d6610 721 return immed_wide_int_const (w, mode);
e92ac421 722 }
10f307d9 723
724 /* We can do this with a gen_lowpart if both desired and current modes
725 are integer, and this is either a constant integer, a register, or a
a23d6610 726 non-volatile MEM. */
727 if (GET_MODE_CLASS (mode) == MODE_INT
728 && GET_MODE_CLASS (oldmode) == MODE_INT
729 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
730 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
731 || (REG_P (x)
732 && (!HARD_REGISTER_P (x)
733 || HARD_REGNO_MODE_OK (REGNO (x), mode))
734 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
735
736 return gen_lowpart (mode, x);
10f307d9 737
77d25dbd 738 /* Converting an integer constant into MODE is always equivalent to a
739 subreg operation. */
740 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
741 {
611234b4 742 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
77d25dbd 743 return simplify_gen_subreg (mode, x, oldmode, 0);
744 }
745
10f307d9 746 temp = gen_reg_rtx (mode);
747 convert_move (temp, x, unsignedp);
748 return temp;
749}
750\f
c7e41aee 751/* Return the largest alignment we can use for doing a move (or store)
752 of MAX_PIECES. ALIGN is the largest alignment we could use. */
753
754static unsigned int
755alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
756{
3754d046 757 machine_mode tmode;
c7e41aee 758
759 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
760 if (align >= GET_MODE_ALIGNMENT (tmode))
761 align = GET_MODE_ALIGNMENT (tmode);
762 else
763 {
3754d046 764 machine_mode tmode, xmode;
c7e41aee 765
766 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
767 tmode != VOIDmode;
768 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
769 if (GET_MODE_SIZE (tmode) > max_pieces
770 || SLOW_UNALIGNED_ACCESS (tmode, align))
771 break;
772
773 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
774 }
775
776 return align;
777}
778
779/* Return the widest integer mode strictly narrower than SIZE (in bytes).
780 If no such mode can be found, return VOIDmode. */
781
3754d046 782static machine_mode
c7e41aee 783widest_int_mode_for_size (unsigned int size)
784{
3754d046 785 machine_mode tmode, mode = VOIDmode;
c7e41aee 786
787 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
788 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
789 if (GET_MODE_SIZE (tmode) < size)
790 mode = tmode;
791
792 return mode;
793}
794
9fe0e1b8 795/* Determine whether the LEN bytes can be moved by using several move
796 instructions. Return nonzero if a call to move_by_pieces should
797 succeed. */
798
799int
d4bd0e64 800can_move_by_pieces (unsigned HOST_WIDE_INT len,
801 unsigned int align)
9fe0e1b8 802{
d4bd0e64 803 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
804 optimize_insn_for_speed_p ());
9fe0e1b8 805}
806
fad4a30c 807/* Generate several move instructions to copy LEN bytes from block FROM to
0a534ba7 808 block TO. (These are MEM rtx's with BLKmode).
ef7dc4b4 809
fad4a30c 810 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
811 used to push FROM to the stack.
ef7dc4b4 812
9fe0e1b8 813 ALIGN is maximum stack alignment we can assume.
10f307d9 814
9fe0e1b8 815 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
816 mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la
817 stpcpy. */
818
819rtx
35cb5232 820move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
821 unsigned int align, int endp)
10f307d9 822{
584511c1 823 struct move_by_pieces_d data;
3754d046 824 machine_mode to_addr_mode;
825 machine_mode from_addr_mode = get_address_mode (from);
ef7dc4b4 826 rtx to_addr, from_addr = XEXP (from, 0);
02e7a332 827 unsigned int max_size = MOVE_MAX_PIECES + 1;
53bd09ab 828 enum insn_code icode;
10f307d9 829
b4ad0ea6 830 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
831
10f307d9 832 data.offset = 0;
10f307d9 833 data.from_addr = from_addr;
ef7dc4b4 834 if (to)
835 {
87cf5753 836 to_addr_mode = get_address_mode (to);
ef7dc4b4 837 to_addr = XEXP (to, 0);
838 data.to = to;
839 data.autinc_to
840 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
841 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
842 data.reverse
843 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
844 }
845 else
846 {
98155838 847 to_addr_mode = VOIDmode;
ef7dc4b4 848 to_addr = NULL_RTX;
849 data.to = NULL_RTX;
850 data.autinc_to = 1;
3764c94e 851 if (STACK_GROWS_DOWNWARD)
852 data.reverse = 1;
853 else
854 data.reverse = 0;
ef7dc4b4 855 }
856 data.to_addr = to_addr;
10f307d9 857 data.from = from;
10f307d9 858 data.autinc_from
859 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
860 || GET_CODE (from_addr) == POST_INC
861 || GET_CODE (from_addr) == POST_DEC);
862
863 data.explicit_inc_from = 0;
864 data.explicit_inc_to = 0;
10f307d9 865 if (data.reverse) data.offset = len;
866 data.len = len;
867
868 /* If copying requires more than two move insns,
869 copy addresses to registers (to make displacements shorter)
870 and use post-increment if available. */
871 if (!(data.autinc_from && data.autinc_to)
025d4f81 872 && move_by_pieces_ninsns (len, align, max_size) > 2)
10f307d9 873 {
c7e41aee 874 /* Find the mode of the largest move...
875 MODE might not be used depending on the definitions of the
876 USE_* macros below. */
3754d046 877 machine_mode mode ATTRIBUTE_UNUSED
c7e41aee 878 = widest_int_mode_for_size (max_size);
53bd09ab 879
880 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
10f307d9 881 {
98155838 882 data.from_addr = copy_to_mode_reg (from_addr_mode,
29c05e22 883 plus_constant (from_addr_mode,
884 from_addr, len));
10f307d9 885 data.autinc_from = 1;
886 data.explicit_inc_from = -1;
887 }
53bd09ab 888 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
10f307d9 889 {
98155838 890 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
10f307d9 891 data.autinc_from = 1;
892 data.explicit_inc_from = 1;
893 }
10f307d9 894 if (!data.autinc_from && CONSTANT_P (from_addr))
98155838 895 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
53bd09ab 896 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
10f307d9 897 {
98155838 898 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 899 plus_constant (to_addr_mode,
900 to_addr, len));
10f307d9 901 data.autinc_to = 1;
902 data.explicit_inc_to = -1;
903 }
53bd09ab 904 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
10f307d9 905 {
98155838 906 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
10f307d9 907 data.autinc_to = 1;
908 data.explicit_inc_to = 1;
909 }
10f307d9 910 if (!data.autinc_to && CONSTANT_P (to_addr))
98155838 911 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
10f307d9 912 }
913
c7e41aee 914 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
10f307d9 915
916 /* First move what we can in the largest integer mode, then go to
917 successively smaller modes. */
918
01dd0067 919 while (max_size > 1 && data.len > 0)
10f307d9 920 {
3754d046 921 machine_mode mode = widest_int_mode_for_size (max_size);
10f307d9 922
923 if (mode == VOIDmode)
924 break;
925
d6bf3b14 926 icode = optab_handler (mov_optab, mode);
325d1c45 927 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
10f307d9 928 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
929
930 max_size = GET_MODE_SIZE (mode);
931 }
932
933 /* The code above should have handled everything. */
611234b4 934 gcc_assert (!data.len);
9fe0e1b8 935
936 if (endp)
937 {
938 rtx to1;
939
611234b4 940 gcc_assert (!data.reverse);
9fe0e1b8 941 if (data.autinc_to)
942 {
943 if (endp == 2)
944 {
945 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
946 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
947 else
98155838 948 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 949 plus_constant (to_addr_mode,
950 data.to_addr,
9fe0e1b8 951 -1));
952 }
953 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
954 data.offset);
955 }
956 else
957 {
958 if (endp == 2)
959 --data.offset;
960 to1 = adjust_address (data.to, QImode, data.offset);
961 }
962 return to1;
963 }
964 else
965 return data.to;
10f307d9 966}
967
968/* Return number of insns required to move L bytes by pieces.
decd7a45 969 ALIGN (in bits) is maximum alignment we can assume. */
10f307d9 970
8136e769 971unsigned HOST_WIDE_INT
025d4f81 972move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
973 unsigned int max_size)
10f307d9 974{
f7c44134 975 unsigned HOST_WIDE_INT n_insns = 0;
10f307d9 976
c7e41aee 977 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
10f307d9 978
01dd0067 979 while (max_size > 1 && l > 0)
10f307d9 980 {
3754d046 981 machine_mode mode;
10f307d9 982 enum insn_code icode;
983
c7e41aee 984 mode = widest_int_mode_for_size (max_size);
10f307d9 985
986 if (mode == VOIDmode)
987 break;
988
d6bf3b14 989 icode = optab_handler (mov_optab, mode);
325d1c45 990 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
10f307d9 991 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
992
993 max_size = GET_MODE_SIZE (mode);
994 }
995
611234b4 996 gcc_assert (!l);
10f307d9 997 return n_insns;
998}
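
/* A worked example of the count above: with L == 10, alignment good enough
   for word access, and MOVE_MAX_PIECES == 4 (so the caller's MAX_SIZE is 5),
   the loop uses SImode for 10 / 4 = 2 insns leaving 2 bytes, then HImode for
   one more insn leaving 0, giving 3 insns in total.  */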
999
1000/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1001 with move instructions for mode MODE. GENFUN is the gen_... function
1002 to make a move insn for that mode. DATA has all the other info. */
1003
1004static void
3d953cb1 1005move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
584511c1 1006 struct move_by_pieces_d *data)
10f307d9 1007{
f7c44134 1008 unsigned int size = GET_MODE_SIZE (mode);
97b330ca 1009 rtx to1 = NULL_RTX, from1;
10f307d9 1010
1011 while (data->len >= size)
1012 {
f7c44134 1013 if (data->reverse)
1014 data->offset -= size;
1015
ef7dc4b4 1016 if (data->to)
f7c44134 1017 {
ef7dc4b4 1018 if (data->autinc_to)
bf42c62d 1019 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1020 data->offset);
ef7dc4b4 1021 else
e513d163 1022 to1 = adjust_address (data->to, mode, data->offset);
f7c44134 1023 }
f7c44134 1024
1025 if (data->autinc_from)
bf42c62d 1026 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1027 data->offset);
f7c44134 1028 else
e513d163 1029 from1 = adjust_address (data->from, mode, data->offset);
10f307d9 1030
e4e498cf 1031 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
e1855348 1032 emit_insn (gen_add2_insn (data->to_addr,
d11aedc7 1033 gen_int_mode (-(HOST_WIDE_INT) size,
1034 GET_MODE (data->to_addr))));
e4e498cf 1035 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
e1855348 1036 emit_insn (gen_add2_insn (data->from_addr,
d11aedc7 1037 gen_int_mode (-(HOST_WIDE_INT) size,
1038 GET_MODE (data->from_addr))));
10f307d9 1039
ef7dc4b4 1040 if (data->to)
1041 emit_insn ((*genfun) (to1, from1));
1042 else
fad4a30c 1043 {
1044#ifdef PUSH_ROUNDING
1045 emit_single_push_insn (mode, from1, NULL);
1046#else
611234b4 1047 gcc_unreachable ();
fad4a30c 1048#endif
1049 }
f7c44134 1050
e4e498cf 1051 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
d11aedc7 1052 emit_insn (gen_add2_insn (data->to_addr,
1053 gen_int_mode (size,
1054 GET_MODE (data->to_addr))));
e4e498cf 1055 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
d11aedc7 1056 emit_insn (gen_add2_insn (data->from_addr,
1057 gen_int_mode (size,
1058 GET_MODE (data->from_addr))));
10f307d9 1059
f7c44134 1060 if (! data->reverse)
1061 data->offset += size;
10f307d9 1062
1063 data->len -= size;
1064 }
1065}
1066\f
c0bfc78e 1067/* Emit code to move a block Y to a block X. This may be done with
1068 string-move instructions, with multiple scalar move instructions,
1069 or with a library call.
10f307d9 1070
c0bfc78e 1071 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
10f307d9 1072 SIZE is an rtx that says how long they are.
325d1c45 1073 ALIGN is the maximum alignment we can assume they have.
0378dbdc 1074 METHOD describes what kind of copy this is, and what mechanisms may be used.
36d63243 1075 MIN_SIZE is the minimal size of the block to move.
1076 MAX_SIZE is the maximal size of the block to move; if it cannot be
1077 represented in unsigned HOST_WIDE_INT, then it is a mask of all ones.
10f307d9 1078
0dbd1c74 1079 Return the address of the new block, if memcpy is called and returns it,
1080 0 otherwise. */
1081
1082rtx
162719b3 1083emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
36d63243 1084 unsigned int expected_align, HOST_WIDE_INT expected_size,
1085 unsigned HOST_WIDE_INT min_size,
9db0f34d 1086 unsigned HOST_WIDE_INT max_size,
1087 unsigned HOST_WIDE_INT probable_max_size)
10f307d9 1088{
0378dbdc 1089 bool may_use_call;
0dbd1c74 1090 rtx retval = 0;
0378dbdc 1091 unsigned int align;
1092
aeccaf28 1093 gcc_assert (size);
1094 if (CONST_INT_P (size)
1095 && INTVAL (size) == 0)
1096 return 0;
1097
0378dbdc 1098 switch (method)
1099 {
1100 case BLOCK_OP_NORMAL:
0b25db21 1101 case BLOCK_OP_TAILCALL:
0378dbdc 1102 may_use_call = true;
1103 break;
1104
1105 case BLOCK_OP_CALL_PARM:
1106 may_use_call = block_move_libcall_safe_for_call_parm ();
1107
1108 /* Make inhibit_defer_pop nonzero around the library call
1109 to force it to pop the arguments right away. */
1110 NO_DEFER_POP;
1111 break;
1112
1113 case BLOCK_OP_NO_LIBCALL:
1114 may_use_call = false;
1115 break;
1116
1117 default:
611234b4 1118 gcc_unreachable ();
0378dbdc 1119 }
1120
aeccaf28 1121 gcc_assert (MEM_P (x) && MEM_P (y));
0378dbdc 1122 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
0ea95334 1123 gcc_assert (align >= BITS_PER_UNIT);
0dbd1c74 1124
67c155cb 1125 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1126 block copy is more efficient for other large modes, e.g. DCmode. */
1127 x = adjust_address (x, BLKmode, 0);
1128 y = adjust_address (y, BLKmode, 0);
1129
e83ff88b 1130 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1131 can be incorrect is coming from __builtin_memcpy. */
971ba038 1132 if (CONST_INT_P (size))
e83ff88b 1133 {
1134 x = shallow_copy_rtx (x);
1135 y = shallow_copy_rtx (y);
5b2a69fa 1136 set_mem_size (x, INTVAL (size));
1137 set_mem_size (y, INTVAL (size));
e83ff88b 1138 }
1139
d4bd0e64 1140 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
9fe0e1b8 1141 move_by_pieces (x, y, INTVAL (size), align, 0);
162719b3 1142 else if (emit_block_move_via_movmem (x, y, size, align,
36d63243 1143 expected_align, expected_size,
9db0f34d 1144 min_size, max_size, probable_max_size))
c0bfc78e 1145 ;
bd1a81f7 1146 else if (may_use_call
1147 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1148 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
006e2d5a 1149 {
1150 /* Since x and y are passed to a libcall, mark the corresponding
1151 tree EXPR as addressable. */
1152 tree y_expr = MEM_EXPR (y);
1153 tree x_expr = MEM_EXPR (x);
1154 if (y_expr)
1155 mark_addressable (y_expr);
1156 if (x_expr)
1157 mark_addressable (x_expr);
1158 retval = emit_block_move_via_libcall (x, y, size,
1159 method == BLOCK_OP_TAILCALL);
1160 }
1161
0378dbdc 1162 else
1163 emit_block_move_via_loop (x, y, size, align);
1164
1165 if (method == BLOCK_OP_CALL_PARM)
1166 OK_DEFER_POP;
a5fd5157 1167
c0bfc78e 1168 return retval;
1169}
a5fd5157 1170
162719b3 1171rtx
1172emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1173{
36d63243 1174 unsigned HOST_WIDE_INT max, min = 0;
1175 if (GET_CODE (size) == CONST_INT)
1176 min = max = UINTVAL (size);
1177 else
1178 max = GET_MODE_MASK (GET_MODE (size));
1179 return emit_block_move_hints (x, y, size, method, 0, -1,
9db0f34d 1180 min, max, max);
162719b3 1181}
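
/* For illustration, the wrapper above turns a compile-time constant size
   into an exact range, so

     emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL);

   reaches emit_block_move_hints with MIN_SIZE, MAX_SIZE and
   PROBABLE_MAX_SIZE all equal to 32, while a variable size is only bounded
   by the mode mask of SIZE.  */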
1182
35cb5232 1183/* A subroutine of emit_block_move. Returns true if calling the
0378dbdc 1184 block move libcall will not clobber any parameters which may have
1185 already been placed on the stack. */
1186
1187static bool
35cb5232 1188block_move_libcall_safe_for_call_parm (void)
0378dbdc 1189{
22c61100 1190#if defined (REG_PARM_STACK_SPACE)
1191 tree fn;
1192#endif
1193
a58c0619 1194 /* If arguments are pushed on the stack, then they're safe. */
0378dbdc 1195 if (PUSH_ARGS)
1196 return true;
0378dbdc 1197
1d5ca076 1198 /* If registers go on the stack anyway, any argument is sure to clobber
a58c0619 1199 an outgoing argument. */
63c68695 1200#if defined (REG_PARM_STACK_SPACE)
22c61100 1201 fn = emit_block_move_libcall_fn (false);
ac2fdd89 1202 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1203 depend on its argument. */
1204 (void) fn;
22c61100 1205 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1206 && REG_PARM_STACK_SPACE (fn) != 0)
1207 return false;
0378dbdc 1208#endif
0378dbdc 1209
a58c0619 1210 /* If any argument goes in memory, then it might clobber an outgoing
1211 argument. */
1212 {
39cba157 1213 CUMULATIVE_ARGS args_so_far_v;
1214 cumulative_args_t args_so_far;
a58c0619 1215 tree fn, arg;
1d5ca076 1216
a58c0619 1217 fn = emit_block_move_libcall_fn (false);
39cba157 1218 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1219 args_so_far = pack_cumulative_args (&args_so_far_v);
1d5ca076 1220
a58c0619 1221 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1222 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1223 {
3754d046 1224 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
39cba157 1225 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
f387af4f 1226 NULL_TREE, true);
a58c0619 1227 if (!tmp || !REG_P (tmp))
0378dbdc 1228 return false;
39cba157 1229 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
a58c0619 1230 return false;
39cba157 1231 targetm.calls.function_arg_advance (args_so_far, mode,
f387af4f 1232 NULL_TREE, true);
a58c0619 1233 }
1234 }
1235 return true;
0378dbdc 1236}
1237
008c057d 1238/* A subroutine of emit_block_move. Expand a movmem pattern;
c0bfc78e 1239 return true if successful. */
6702c250 1240
c0bfc78e 1241static bool
162719b3 1242emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
36d63243 1243 unsigned int expected_align, HOST_WIDE_INT expected_size,
1244 unsigned HOST_WIDE_INT min_size,
9db0f34d 1245 unsigned HOST_WIDE_INT max_size,
1246 unsigned HOST_WIDE_INT probable_max_size)
c0bfc78e 1247{
fbc6244b 1248 int save_volatile_ok = volatile_ok;
3754d046 1249 machine_mode mode;
a5fd5157 1250
162719b3 1251 if (expected_align < align)
1252 expected_align = align;
36d63243 1253 if (expected_size != -1)
1254 {
9db0f34d 1255 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1256 expected_size = probable_max_size;
36d63243 1257 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1258 expected_size = min_size;
1259 }
162719b3 1260
c0bfc78e 1261 /* Since this is a move insn, we don't care about volatility. */
1262 volatile_ok = 1;
1263
d5f9786f 1264 /* Try the most limited insn first, because there's no point
1265 including more than one in the machine description unless
1266 the more limited one has some advantage. */
1267
c0bfc78e 1268 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1269 mode = GET_MODE_WIDER_MODE (mode))
1270 {
6b531606 1271 enum insn_code code = direct_optab_handler (movmem_optab, mode);
c0bfc78e 1272
1273 if (code != CODE_FOR_nothing
1274 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1275 here because if SIZE is less than the mode mask, as it is
1276 returned by the macro, it will definitely be less than the
300c6cee 1277 actual mode mask. Since SIZE is within the Pmode address
1278 space, we limit MODE to Pmode. */
971ba038 1279 && ((CONST_INT_P (size)
c0bfc78e 1280 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1281 <= (GET_MODE_MASK (mode) >> 1)))
36d63243 1282 || max_size <= (GET_MODE_MASK (mode) >> 1)
300c6cee 1283 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
8786db1e 1284 {
9db0f34d 1285 struct expand_operand ops[9];
8786db1e 1286 unsigned int nops;
c0bfc78e 1287
1288 /* ??? When called via emit_block_move_for_call, it'd be
1289 nice if there were some way to inform the backend, so
1290 that it doesn't fail the expansion because it thinks
1291 emitting the libcall would be more efficient. */
32f79657 1292 nops = insn_data[(int) code].n_generator_args;
9db0f34d 1293 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
b52cb719 1294
8786db1e 1295 create_fixed_operand (&ops[0], x);
1296 create_fixed_operand (&ops[1], y);
1297 /* The check above guarantees that this size conversion is valid. */
1298 create_convert_operand_to (&ops[2], size, mode, true);
1299 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
36d63243 1300 if (nops >= 6)
8786db1e 1301 {
1302 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1303 create_integer_operand (&ops[5], expected_size);
8786db1e 1304 }
9db0f34d 1305 if (nops >= 8)
36d63243 1306 {
1307 create_integer_operand (&ops[6], min_size);
1308 /* If we can not represent the maximal size,
1309 make parameter NULL. */
1310 if ((HOST_WIDE_INT) max_size != -1)
1311 create_integer_operand (&ops[7], max_size);
1312 else
1313 create_fixed_operand (&ops[7], NULL);
1314 }
9db0f34d 1315 if (nops == 9)
1316 {
1317 /* If we cannot represent the maximal size,
1318 pass the parameter as NULL. */
1319 if ((HOST_WIDE_INT) probable_max_size != -1)
1320 create_integer_operand (&ops[8], probable_max_size);
1321 else
1322 create_fixed_operand (&ops[8], NULL);
1323 }
8786db1e 1324 if (maybe_expand_insn (code, nops, ops))
c0bfc78e 1325 {
fbc6244b 1326 volatile_ok = save_volatile_ok;
c0bfc78e 1327 return true;
10f307d9 1328 }
1329 }
c0bfc78e 1330 }
10f307d9 1331
fbc6244b 1332 volatile_ok = save_volatile_ok;
c0bfc78e 1333 return false;
1334}
6702c250 1335
f896c932 1336/* A subroutine of emit_block_move. Expand a call to memcpy.
c0bfc78e 1337 Return the return value from memcpy, 0 otherwise. */
06b8e3db 1338
ab608690 1339rtx
0b25db21 1340emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
c0bfc78e 1341{
d5f9786f 1342 rtx dst_addr, src_addr;
c2f47e15 1343 tree call_expr, fn, src_tree, dst_tree, size_tree;
3754d046 1344 machine_mode size_mode;
c0bfc78e 1345 rtx retval;
06b8e3db 1346
0a534ba7 1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
d5f9786f 1350
99182918 1351 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1352 src_addr = copy_addr_to_reg (XEXP (src, 0));
c0bfc78e 1353
d5f9786f 1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
d5f9786f 1356
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
c0bfc78e 1359
f896c932 1360 size_mode = TYPE_MODE (sizetype);
d5f9786f 1361
c0bfc78e 1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1364
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
f896c932 1369 for returning pointers, we could end up generating incorrect code. */
c0bfc78e 1370
f896c932 1371 size_tree = make_tree (sizetype, size);
c0bfc78e 1372
1373 fn = emit_block_move_libcall_fn (true);
c2f47e15 1374 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
0b25db21 1375 CALL_EXPR_TAILCALL (call_expr) = tailcall;
c0bfc78e 1376
8ec3c5c2 1377 retval = expand_normal (call_expr);
c0bfc78e 1378
f896c932 1379 return retval;
c0bfc78e 1380}
f708f8fd 1381
c0bfc78e 1382/* A subroutine of emit_block_move_via_libcall. Create the tree node
ea259bbe 1383 for the function we use for block copies. */
f708f8fd 1384
c0bfc78e 1385static GTY(()) tree block_move_fn;
1386
d459e0d8 1387void
35cb5232 1388init_block_move_fn (const char *asmspec)
c0bfc78e 1389{
d459e0d8 1390 if (!block_move_fn)
c0bfc78e 1391 {
c8010b80 1392 tree args, fn, attrs, attr_args;
d459e0d8 1393
f896c932 1394 fn = get_identifier ("memcpy");
1395 args = build_function_type_list (ptr_type_node, ptr_type_node,
1396 const_ptr_type_node, sizetype,
1397 NULL_TREE);
f708f8fd 1398
e60a6f7b 1399 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
c0bfc78e 1400 DECL_EXTERNAL (fn) = 1;
1401 TREE_PUBLIC (fn) = 1;
1402 DECL_ARTIFICIAL (fn) = 1;
1403 TREE_NOTHROW (fn) = 1;
f0f2eb24 1404 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1405 DECL_VISIBILITY_SPECIFIED (fn) = 1;
8ca560c1 1406
c8010b80 1407 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1408 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1409
1410 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1411
c0bfc78e 1412 block_move_fn = fn;
10f307d9 1413 }
0dbd1c74 1414
d459e0d8 1415 if (asmspec)
b2c4af5e 1416 set_user_assembler_name (block_move_fn, asmspec);
d459e0d8 1417}
1418
1419static tree
35cb5232 1420emit_block_move_libcall_fn (int for_call)
d459e0d8 1421{
1422 static bool emitted_extern;
1423
1424 if (!block_move_fn)
1425 init_block_move_fn (NULL);
1426
c0bfc78e 1427 if (for_call && !emitted_extern)
1428 {
1429 emitted_extern = true;
b2c4af5e 1430 make_decl_rtl (block_move_fn);
c0bfc78e 1431 }
1432
d459e0d8 1433 return block_move_fn;
10f307d9 1434}
0378dbdc 1435
1436/* A subroutine of emit_block_move. Copy the data via an explicit
1437 loop. This is used only when libcalls are forbidden. */
1438/* ??? It'd be nice to copy in hunks larger than QImode. */
1439
1440static void
35cb5232 1441emit_block_move_via_loop (rtx x, rtx y, rtx size,
1442 unsigned int align ATTRIBUTE_UNUSED)
0378dbdc 1443{
1d277a67 1444 rtx_code_label *cmp_label, *top_label;
1445 rtx iter, x_addr, y_addr, tmp;
3754d046 1446 machine_mode x_addr_mode = get_address_mode (x);
1447 machine_mode y_addr_mode = get_address_mode (y);
1448 machine_mode iter_mode;
0378dbdc 1449
1450 iter_mode = GET_MODE (size);
1451 if (iter_mode == VOIDmode)
1452 iter_mode = word_mode;
1453
1454 top_label = gen_label_rtx ();
1455 cmp_label = gen_label_rtx ();
1456 iter = gen_reg_rtx (iter_mode);
1457
1458 emit_move_insn (iter, const0_rtx);
1459
1460 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1461 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1462 do_pending_stack_adjust ();
1463
0378dbdc 1464 emit_jump (cmp_label);
1465 emit_label (top_label);
1466
98155838 1467 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
59d4eb16 1468 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
98155838 1469
1470 if (x_addr_mode != y_addr_mode)
1471 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
59d4eb16 1472 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
98155838 1473
0378dbdc 1474 x = change_address (x, QImode, x_addr);
1475 y = change_address (y, QImode, y_addr);
1476
1477 emit_move_insn (x, y);
1478
1479 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1480 true, OPTAB_LIB_WIDEN);
1481 if (tmp != iter)
1482 emit_move_insn (iter, tmp);
1483
0378dbdc 1484 emit_label (cmp_label);
1485
1486 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
584abc98 1487 true, top_label, REG_BR_PROB_BASE * 90 / 100);
0378dbdc 1488}
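
/* The RTL emitted above behaves roughly like the following byte-copy loop
   (a sketch that ignores the address-mode conversions):

     for (iter = 0; iter < size; iter++)
       ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];

   with the comparison placed after the body, so the loop is entered by an
   initial jump down to the test.  */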
10f307d9 1489\f
1490/* Copy all or part of a value X into registers starting at REGNO.
1491 The number of registers to be filled is NREGS. */
1492
1493void
3754d046 1494move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
10f307d9 1495{
1496 int i;
fa56dc1d 1497 rtx pat;
1d277a67 1498 rtx_insn *last;
10f307d9 1499
c9750f6d 1500 if (nregs == 0)
1501 return;
1502
ca316360 1503 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
10f307d9 1504 x = validize_mem (force_const_mem (mode, x));
1505
1506 /* See if the machine can do this with a load multiple insn. */
d3afc10f 1507 if (HAVE_load_multiple)
10f307d9 1508 {
d3afc10f 1509 last = get_last_insn ();
941522d6 1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
d3afc10f 1511 GEN_INT (nregs));
1512 if (pat)
1513 {
1514 emit_insn (pat);
1515 return;
1516 }
1517 else
1518 delete_insns_since (last);
10f307d9 1519 }
10f307d9 1520
1521 for (i = 0; i < nregs; i++)
941522d6 1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
10f307d9 1523 operand_subword_force (x, i, mode));
1524}
1525
1526/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
530178a9 1527 The number of registers to be filled is NREGS. */
db7bca86 1528
10f307d9 1529void
35cb5232 1530move_block_from_reg (int regno, rtx x, int nregs)
10f307d9 1531{
1532 int i;
10f307d9 1533
cc119c14 1534 if (nregs == 0)
1535 return;
1536
10f307d9 1537 /* See if the machine can do this with a store multiple insn. */
d3afc10f 1538 if (HAVE_store_multiple)
10f307d9 1539 {
1d277a67 1540 rtx_insn *last = get_last_insn ();
530178a9 1541 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1542 GEN_INT (nregs));
d3afc10f 1543 if (pat)
1544 {
1545 emit_insn (pat);
1546 return;
1547 }
1548 else
1549 delete_insns_since (last);
10f307d9 1550 }
10f307d9 1551
1552 for (i = 0; i < nregs; i++)
1553 {
1554 rtx tem = operand_subword (x, i, 1, BLKmode);
1555
611234b4 1556 gcc_assert (tem);
10f307d9 1557
941522d6 1558 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
10f307d9 1559 }
1560}
1561
b566e2e5 1562/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1563 ORIG, where ORIG is a non-consecutive group of registers represented by
1564 a PARALLEL. The clone is identical to the original except in that the
1565 original set of registers is replaced by a new set of pseudo registers.
1566 The new set has the same modes as the original set. */
1567
1568rtx
35cb5232 1569gen_group_rtx (rtx orig)
b566e2e5 1570{
1571 int i, length;
1572 rtx *tmps;
1573
611234b4 1574 gcc_assert (GET_CODE (orig) == PARALLEL);
b566e2e5 1575
1576 length = XVECLEN (orig, 0);
2457c754 1577 tmps = XALLOCAVEC (rtx, length);
b566e2e5 1578
1579 /* Skip a NULL entry in first slot. */
1580 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1581
1582 if (i)
1583 tmps[0] = 0;
1584
1585 for (; i < length; i++)
1586 {
3754d046 1587 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
b566e2e5 1588 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1589
1590 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1591 }
1592
1593 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1594}
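/* Editorial sketch (not part of expr.c): gen_group_rtx is normally paired
   with emit_group_move (defined below), e.g. when an incoming argument
   arrives in a PARALLEL of hard registers and the expander wants pseudos
   to work with.  The wrapper name is invented.  */
#if 0
static rtx
example_group_to_pseudos (rtx incoming)
{
  rtx pseudos = gen_group_rtx (incoming);   /* same shape, fresh pseudos  */
  emit_group_move (pseudos, incoming);      /* copy the hard regs over    */
  return pseudos;
}
#endif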
1595
e2ff5c1b 1596/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1597 except that values are placed in TMPS[i], and must later be moved
10689255 1598 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
ce739127 1599
e2ff5c1b 1600static void
1601emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
ce739127 1602{
e2ff5c1b 1603 rtx src;
6ede8018 1604 int start, i;
3754d046 1605 machine_mode m = GET_MODE (orig_src);
ce739127 1606
611234b4 1607 gcc_assert (GET_CODE (dst) == PARALLEL);
ce739127 1608
553b7a5d 1609 if (m != VOIDmode
1610 && !SCALAR_INT_MODE_P (m)
1611 && !MEM_P (orig_src)
1612 && GET_CODE (orig_src) != CONCAT)
57fb21b0 1613 {
3754d046 1614 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
57fb21b0 1615 if (imode == BLKmode)
0ab48139 1616 src = assign_stack_temp (GET_MODE (orig_src), ssize);
57fb21b0 1617 else
1618 src = gen_reg_rtx (imode);
1619 if (imode != BLKmode)
1620 src = gen_lowpart (GET_MODE (orig_src), src);
1621 emit_move_insn (src, orig_src);
1622 /* ...and back again. */
1623 if (imode != BLKmode)
1624 src = gen_lowpart (imode, src);
e2ff5c1b 1625 emit_group_load_1 (tmps, dst, src, type, ssize);
57fb21b0 1626 return;
1627 }
1628
ce739127 1629 /* Check for a NULL entry, used to indicate that the parameter goes
1630 both on the stack and in registers. */
6ede8018 1631 if (XEXP (XVECEXP (dst, 0, 0), 0))
1632 start = 0;
ce739127 1633 else
6ede8018 1634 start = 1;
1635
6ede8018 1636 /* Process the pieces. */
1637 for (i = start; i < XVECLEN (dst, 0); i++)
1638 {
3754d046 1639 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
02e7a332 1640 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1641 unsigned int bytelen = GET_MODE_SIZE (mode);
6ede8018 1642 int shift = 0;
1643
1644 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 1645 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
6ede8018 1646 {
5f4cd670 1647 /* Arrange to shift the fragment to where it belongs.
1648 extract_bit_field loads to the lsb of the reg. */
1649 if (
1650#ifdef BLOCK_REG_PADDING
1651 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1652 == (BYTES_BIG_ENDIAN ? upward : downward)
1653#else
1654 BYTES_BIG_ENDIAN
1655#endif
1656 )
1657 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
6ede8018 1658 bytelen = ssize - bytepos;
611234b4 1659 gcc_assert (bytelen > 0);
6ede8018 1660 }
1661
c037cba7 1662 /* If we won't be loading directly from memory, protect the real source
1663 from strange tricks we might play; but make sure that the source can
1664 be loaded directly into the destination. */
1665 src = orig_src;
e16ceb8e 1666 if (!MEM_P (orig_src)
c037cba7 1667 && (!CONSTANT_P (orig_src)
1668 || (GET_MODE (orig_src) != mode
1669 && GET_MODE (orig_src) != VOIDmode)))
1670 {
1671 if (GET_MODE (orig_src) == VOIDmode)
1672 src = gen_reg_rtx (mode);
1673 else
1674 src = gen_reg_rtx (GET_MODE (orig_src));
2c269e73 1675
c037cba7 1676 emit_move_insn (src, orig_src);
1677 }
1678
6ede8018 1679 /* Optimize the access just a bit. */
e16ceb8e 1680 if (MEM_P (src)
5f4cd670 1681 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1682 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
fe352cf1 1683 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
6ede8018 1684 && bytelen == GET_MODE_SIZE (mode))
1685 {
1686 tmps[i] = gen_reg_rtx (mode);
e513d163 1687 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
ce739127 1688 }
9a98257b 1689 else if (COMPLEX_MODE_P (mode)
1690 && GET_MODE (src) == mode
1691 && bytelen == GET_MODE_SIZE (mode))
1692 /* Let emit_move_complex do the bulk of the work. */
1693 tmps[i] = src;
a1000ec6 1694 else if (GET_CODE (src) == CONCAT)
1695 {
2a075f91 1696 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1697 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1698
1699 if ((bytepos == 0 && bytelen == slen0)
1700 || (bytepos != 0 && bytepos + bytelen <= slen))
4c183732 1701 {
2a075f91 1702 /* The following assumes that the concatenated objects all
1703 have the same size. In this case, a simple calculation
1704 can be used to determine the object and the bit field
1705 to be extracted. */
1706 tmps[i] = XEXP (src, bytepos / slen0);
4c183732 1707 if (! CONSTANT_P (tmps[i])
8ad4c111 1708 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
4c183732 1709 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2a075f91 1710 (bytepos % slen0) * BITS_PER_UNIT,
3f71db40 1711 1, NULL_RTX, mode, mode);
4c183732 1712 }
611234b4 1713 else
10d075b5 1714 {
611234b4 1715 rtx mem;
6ee1d299 1716
611234b4 1717 gcc_assert (!bytepos);
0ab48139 1718 mem = assign_stack_temp (GET_MODE (src), slen);
10d075b5 1719 emit_move_insn (mem, src);
6ee1d299 1720 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
3f71db40 1721 0, 1, NULL_RTX, mode, mode);
10d075b5 1722 }
a1000ec6 1723 }
c050f95a 1724 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1725 SIMD register, which is currently broken. While we get GCC
1726 to emit proper RTL for these cases, let's dump to memory. */
1727 else if (VECTOR_MODE_P (GET_MODE (dst))
8ad4c111 1728 && REG_P (src))
c050f95a 1729 {
1730 int slen = GET_MODE_SIZE (GET_MODE (src));
1731 rtx mem;
1732
0ab48139 1733 mem = assign_stack_temp (GET_MODE (src), slen);
c050f95a 1734 emit_move_insn (mem, src);
1735 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1736 }
568b64fd 1737 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1738 && XVECLEN (dst, 0) > 1)
9af5ce0c 1739 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
25b1440f 1740 else if (CONSTANT_P (src))
1741 {
1742 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1743
1744 if (len == ssize)
1745 tmps[i] = src;
1746 else
1747 {
1748 rtx first, second;
1749
e913b5cd 1750 /* TODO: const_wide_int can have sizes other than this... */
25b1440f 1751 gcc_assert (2 * len == ssize);
1752 split_double (src, &first, &second);
1753 if (i)
1754 tmps[i] = second;
1755 else
1756 tmps[i] = first;
1757 }
1758 }
1759 else if (REG_P (src) && GET_MODE (src) == mode)
73645c13 1760 tmps[i] = src;
ce739127 1761 else
325d1c45 1762 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
3f71db40 1763 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1445ea5b 1764 mode, mode);
ce739127 1765
5f4cd670 1766 if (shift)
92966f8b 1767 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
f5ff0b21 1768 shift, tmps[i], 0);
ce739127 1769 }
e2ff5c1b 1770}
1771
1772/* Emit code to move a block SRC of type TYPE to a block DST,
1773 where DST is non-consecutive registers represented by a PARALLEL.
1774 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1775 if not known. */
1776
1777void
1778emit_group_load (rtx dst, rtx src, tree type, int ssize)
1779{
1780 rtx *tmps;
1781 int i;
1782
2457c754 1783 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
e2ff5c1b 1784 emit_group_load_1 (tmps, dst, src, type, ssize);
325d1c45 1785
6ede8018 1786 /* Copy the extracted pieces into the proper (probable) hard regs. */
e2ff5c1b 1787 for (i = 0; i < XVECLEN (dst, 0); i++)
1788 {
1789 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1790 if (d == NULL)
1791 continue;
1792 emit_move_insn (d, tmps[i]);
1793 }
1794}
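/* Editorial sketch (not part of expr.c): the typical call made when a small
   aggregate argument is split across registers by the ABI.  ARG_PARALLEL
   stands for the PARALLEL returned by the target's argument hook, ARG_MEM
   for the argument's memory image; the wrapper itself is invented.  */
#if 0
static void
example_load_arg_into_regs (rtx arg_parallel, rtx arg_mem, tree arg_type)
{
  emit_group_load (arg_parallel, arg_mem, arg_type,
		   int_size_in_bytes (arg_type));
}
#endif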
1795
1796/* Similar, but load SRC into new pseudos in a format that looks like
1797 PARALLEL. This can later be fed to emit_group_move to get things
1798 in the right place. */
1799
1800rtx
1801emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1802{
1803 rtvec vec;
1804 int i;
1805
1806 vec = rtvec_alloc (XVECLEN (parallel, 0));
1807 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1808
1809 /* Convert the vector to look just like the original PARALLEL, except
1810 with the computed values. */
1811 for (i = 0; i < XVECLEN (parallel, 0); i++)
1812 {
1813 rtx e = XVECEXP (parallel, 0, i);
1814 rtx d = XEXP (e, 0);
1815
1816 if (d)
1817 {
1818 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1819 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1820 }
1821 RTVEC_ELT (vec, i) = e;
1822 }
1823
1824 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
ce739127 1825}
1826
b566e2e5 1827/* Emit code to move a block SRC to block DST, where SRC and DST are
1828 non-consecutive groups of registers, each represented by a PARALLEL. */
1829
1830void
35cb5232 1831emit_group_move (rtx dst, rtx src)
b566e2e5 1832{
1833 int i;
1834
611234b4 1835 gcc_assert (GET_CODE (src) == PARALLEL
1836 && GET_CODE (dst) == PARALLEL
1837 && XVECLEN (src, 0) == XVECLEN (dst, 0));
b566e2e5 1838
1839 /* Skip first entry if NULL. */
1840 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1841 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1842 XEXP (XVECEXP (src, 0, i), 0));
1843}
1844
e2ff5c1b 1845/* Move a group of registers represented by a PARALLEL into pseudos. */
1846
1847rtx
1848emit_group_move_into_temps (rtx src)
1849{
1850 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1851 int i;
1852
1853 for (i = 0; i < XVECLEN (src, 0); i++)
1854 {
1855 rtx e = XVECEXP (src, 0, i);
1856 rtx d = XEXP (e, 0);
1857
1858 if (d)
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1861 }
1862
1863 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1864}
1865
5f4cd670 1866/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1867 where SRC is non-consecutive registers represented by a PARALLEL.
1868 SSIZE represents the total size of block ORIG_DST, or -1 if not
1869 known. */
ce739127 1870
1871void
5f4cd670 1872emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
ce739127 1873{
6ede8018 1874 rtx *tmps, dst;
73fe0e40 1875 int start, finish, i;
3754d046 1876 machine_mode m = GET_MODE (orig_dst);
ce739127 1877
611234b4 1878 gcc_assert (GET_CODE (src) == PARALLEL);
ce739127 1879
0b755acc 1880 if (!SCALAR_INT_MODE_P (m)
1881 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
57fb21b0 1882 {
3754d046 1883 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
57fb21b0 1884 if (imode == BLKmode)
0ab48139 1885 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
57fb21b0 1886 else
1887 dst = gen_reg_rtx (imode);
1888 emit_group_store (dst, src, type, ssize);
1889 if (imode != BLKmode)
1890 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1891 emit_move_insn (orig_dst, dst);
1892 return;
1893 }
1894
ce739127 1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
6ede8018 1897 if (XEXP (XVECEXP (src, 0, 0), 0))
1898 start = 0;
ce739127 1899 else
6ede8018 1900 start = 1;
73fe0e40 1901 finish = XVECLEN (src, 0);
6ede8018 1902
2457c754 1903 tmps = XALLOCAVEC (rtx, finish);
ce739127 1904
6ede8018 1905 /* Copy the (probable) hard regs into pseudos. */
73fe0e40 1906 for (i = start; i < finish; i++)
ce739127 1907 {
6ede8018 1908 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2b4bed8a 1909 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1910 {
1911 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1912 emit_move_insn (tmps[i], reg);
1913 }
1914 else
1915 tmps[i] = reg;
6ede8018 1916 }
ce739127 1917
6ede8018 1918 /* If we won't be storing directly into memory, protect the real destination
1919 from strange tricks we might play. */
1920 dst = orig_dst;
723d3639 1921 if (GET_CODE (dst) == PARALLEL)
1922 {
1923 rtx temp;
1924
1925 /* We can get a PARALLEL dst if there is a conditional expression in
1926 a return statement. In that case, the dst and src are the same,
1927 so no action is necessary. */
1928 if (rtx_equal_p (dst, src))
1929 return;
1930
1931 /* It is unclear if we can ever reach here, but we may as well handle
1932 it. Allocate a temporary, and split this into a store/load to/from
1933 the temporary. */
0ab48139 1934 temp = assign_stack_temp (GET_MODE (dst), ssize);
5f4cd670 1935 emit_group_store (temp, src, type, ssize);
1936 emit_group_load (dst, temp, type, ssize);
723d3639 1937 return;
1938 }
e16ceb8e 1939 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
6ede8018 1940 {
3754d046 1941 machine_mode outer = GET_MODE (dst);
1942 machine_mode inner;
f25b36d2 1943 HOST_WIDE_INT bytepos;
73fe0e40 1944 bool done = false;
1945 rtx temp;
1946
2b4bed8a 1947 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
73fe0e40 1948 dst = gen_reg_rtx (outer);
1949
6ede8018 1950 /* Make life a bit easier for combine. */
73fe0e40 1951 /* If the first element of the vector is the low part
1952 of the destination mode, use a paradoxical subreg to
1953 initialize the destination. */
1954 if (start < finish)
1955 {
1956 inner = GET_MODE (tmps[start]);
47e9d4ca 1957 bytepos = subreg_lowpart_offset (inner, outer);
73fe0e40 1958 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1959 {
1960 temp = simplify_gen_subreg (outer, tmps[start],
47e9d4ca 1961 inner, 0);
4bd9981b 1962 if (temp)
1963 {
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 start++;
1967 }
73fe0e40 1968 }
1969 }
1970
1971 /* If the first element wasn't the low part, try the last. */
1972 if (!done
1973 && start < finish - 1)
1974 {
1975 inner = GET_MODE (tmps[finish - 1]);
47e9d4ca 1976 bytepos = subreg_lowpart_offset (inner, outer);
73fe0e40 1977 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1978 {
1979 temp = simplify_gen_subreg (outer, tmps[finish - 1],
47e9d4ca 1980 inner, 0);
4bd9981b 1981 if (temp)
1982 {
1983 emit_move_insn (dst, temp);
1984 done = true;
1985 finish--;
1986 }
73fe0e40 1987 }
1988 }
1989
1990 /* Otherwise, simply initialize the result to zero. */
1991 if (!done)
1992 emit_move_insn (dst, CONST0_RTX (outer));
6ede8018 1993 }
6ede8018 1994
1995 /* Process the pieces. */
73fe0e40 1996 for (i = start; i < finish; i++)
6ede8018 1997 {
02e7a332 1998 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
3754d046 1999 machine_mode mode = GET_MODE (tmps[i]);
02e7a332 2000 unsigned int bytelen = GET_MODE_SIZE (mode);
1603adf9 2001 unsigned int adj_bytelen;
463e3bf7 2002 rtx dest = dst;
6ede8018 2003
2004 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 2005 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
dc77d5c5 2006 adj_bytelen = ssize - bytepos;
1603adf9 2007 else
2008 adj_bytelen = bytelen;
ce739127 2009
463e3bf7 2010 if (GET_CODE (dst) == CONCAT)
2011 {
dc77d5c5 2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
463e3bf7 2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
611234b4 2020 else
376c21d1 2021 {
3754d046 2022 machine_mode dest_mode = GET_MODE (dest);
2023 machine_mode tmp_mode = GET_MODE (tmps[i]);
2c49840d 2024
47b0fad7 2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2c49840d 2026
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2029 {
47b0fad7 2030 dest = assign_stack_temp (dest_mode,
0ab48139 2031 GET_MODE_SIZE (dest_mode));
2c49840d 2032 emit_move_insn (adjust_address (dest,
2033 tmp_mode,
2034 bytepos),
2035 tmps[i]);
2036 dst = dest;
2037 }
2038 else
2039 {
47b0fad7 2040 dest = assign_stack_temp (tmp_mode,
0ab48139 2041 GET_MODE_SIZE (tmp_mode));
2c49840d 2042 emit_move_insn (dest, tmps[i]);
2043 dst = adjust_address (dest, dest_mode, bytepos);
2044 }
376c21d1 2045 break;
2046 }
463e3bf7 2047 }
2048
1603adf9 2049 /* Handle trailing fragments that run over the size of the struct. */
dc77d5c5 2050 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2051 {
2052 /* store_bit_field always takes its value from the lsb.
2053 Move the fragment to the lsb if it's not already there. */
2054 if (
2055#ifdef BLOCK_REG_PADDING
2056 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2057 == (BYTES_BIG_ENDIAN ? upward : downward)
2058#else
2059 BYTES_BIG_ENDIAN
2060#endif
2061 )
2062 {
2063 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2064 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
f5ff0b21 2065 shift, tmps[i], 0);
dc77d5c5 2066 }
1603adf9 2067
2068 /* Make sure not to write past the end of the struct. */
2069 store_bit_field (dest,
2070 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
7998fe4b 2071 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
1603adf9 2072 VOIDmode, tmps[i]);
dc77d5c5 2073 }
2074
6ede8018 2075 /* Optimize the access just a bit. */
1603adf9 2076 else if (MEM_P (dest)
2077 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2078 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2079 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2080 && bytelen == GET_MODE_SIZE (mode))
463e3bf7 2081 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1603adf9 2082
6ede8018 2083 else
463e3bf7 2084 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
4bb60ec7 2085 0, 0, mode, tmps[i]);
ce739127 2086 }
fe352cf1 2087
6ede8018 2088 /* Copy from the pseudo into the (probable) hard reg. */
376c21d1 2089 if (orig_dst != dst)
6ede8018 2090 emit_move_insn (orig_dst, dst);
ce739127 2091}
2092
933eb13a 2093/* Return a form of X that does not use a PARALLEL. TYPE is the type
2094 of the value stored in X. */
2095
2096rtx
2097maybe_emit_group_store (rtx x, tree type)
2098{
3754d046 2099 machine_mode mode = TYPE_MODE (type);
933eb13a 2100 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2101 if (GET_CODE (x) == PARALLEL)
2102 {
2103 rtx result = gen_reg_rtx (mode);
2104 emit_group_store (result, x, type, int_size_in_bytes (type));
2105 return result;
2106 }
2107 return x;
2108}
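/* Editorial sketch (not part of expr.c): normalizing a call's return value
   before using it as an ordinary rtx.  The wrapper and its argument names
   are invented.  */
#if 0
static rtx
example_normalize_call_value (rtx value, tree valtype)
{
  /* Either returns VALUE unchanged or a pseudo of TYPE_MODE (valtype)
     holding the contents of the grouped registers.  */
  return maybe_emit_group_store (value, valtype);
}
#endif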
2109
7e91b548 2110/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
25eb0f59 2111
7e91b548 2112 This is used on targets that return BLKmode values in registers. */
25eb0f59 2113
7e91b548 2114void
2115copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
25eb0f59 2116{
325d1c45 2117 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2118 rtx src = NULL, dst = NULL;
2119 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2c8ff1ed 2120 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
3754d046 2121 machine_mode mode = GET_MODE (srcreg);
2122 machine_mode tmode = GET_MODE (target);
2123 machine_mode copy_mode;
325d1c45 2124
7e91b548 2125 /* BLKmode registers created in the back-end shouldn't have survived. */
2126 gcc_assert (mode != BLKmode);
325d1c45 2127
2c8ff1ed 2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
2131
2132 In most ABIs, the structure will be returned at the least significant end of
2133 the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes % UNITS_PER_WORD != 0
2138 && (targetm.calls.return_in_msb (type)
2139 ? !BYTES_BIG_ENDIAN
2140 : BYTES_BIG_ENDIAN))
2141 padding_correction
325d1c45 2142 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2143
7e91b548 2144 /* We can use a single move if we have an exact mode for the size. */
2145 else if (MEM_P (target)
2146 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2147 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2148 && bytes == GET_MODE_SIZE (mode))
2149 {
2150 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2151 return;
2152 }
2153
2154 /* And if we additionally have the same mode for a register. */
2155 else if (REG_P (target)
2156 && GET_MODE (target) == mode
2157 && bytes == GET_MODE_SIZE (mode))
2158 {
2159 emit_move_insn (target, srcreg);
2160 return;
2161 }
2162
2163 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2164 into a new pseudo which is a full word. */
2165 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2166 {
2167 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2168 mode = word_mode;
2169 }
2170
13a68d5b 2171 /* Copy the structure BITSIZE bits at a time. If the target lives in
2172 memory, take care of not reading/writing past its end by selecting
2173 a copy mode suited to BITSIZE. This should always be possible given
2174 how it is computed.
fa56dc1d 2175
7e91b548 2176 If the target lives in register, make sure not to select a copy mode
2177 larger than the mode of the register.
2178
325d1c45 2179 We could probably emit more efficient code for machines which do not use
2180 strict alignment, but it doesn't seem worth the effort at the current
2181 time. */
13a68d5b 2182
2183 copy_mode = word_mode;
7e91b548 2184 if (MEM_P (target))
13a68d5b 2185 {
3754d046 2186 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
13a68d5b 2187 if (mem_mode != BLKmode)
2188 copy_mode = mem_mode;
2189 }
7e91b548 2190 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2191 copy_mode = tmode;
13a68d5b 2192
2c8ff1ed 2193 for (bitpos = 0, xbitpos = padding_correction;
325d1c45 2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2196 {
fa56dc1d 2197 /* We need a new source operand each time xbitpos is on a
2c8ff1ed 2198 word boundary and when xbitpos == padding_correction
325d1c45 2199 (the first time through). */
7e91b548 2200 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2201 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
325d1c45 2202
2203 /* We need a new destination operand each time bitpos is on
2204 a word boundary. */
7e91b548 2205 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2206 dst = target;
2207 else if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
fa56dc1d 2209
325d1c45 2210 /* Use xbitpos for the source extraction (right justified) and
13a68d5b 2211 bitpos for the destination store (left justified). */
4bb60ec7 2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
325d1c45 2213 extract_bit_field (src, bitsize,
3f71db40 2214 xbitpos % BITS_PER_WORD, 1,
13a68d5b 2215 NULL_RTX, copy_mode, copy_mode));
325d1c45 2216 }
25eb0f59 2217}
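/* Editorial sketch (not part of expr.c): copying a BLKmode value that came
   back from a call in a single hard register into the caller's stack slot,
   falling back to emit_group_store for a PARALLEL.  The wrapper and its
   argument names are invented.  */
#if 0
static void
example_store_blkmode_return (rtx result_slot, rtx return_reg, tree rettype)
{
  if (GET_CODE (return_reg) != PARALLEL)
    copy_blkmode_from_reg (result_slot, return_reg, rettype);
  else
    emit_group_store (result_slot, return_reg, rettype,
		      int_size_in_bytes (rettype));
}
#endif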
2218
ee5ab2d1 2219/* Copy BLKmode value SRC into a register of mode MODE. Return the
2220 register if it contains any data, otherwise return null.
2221
2222 This is used on targets that return BLKmode values in registers. */
2223
2224rtx
3754d046 2225copy_blkmode_to_reg (machine_mode mode, tree src)
ee5ab2d1 2226{
2227 int i, n_regs;
2228 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2229 unsigned int bitsize;
2230 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
3754d046 2231 machine_mode dst_mode;
ee5ab2d1 2232
2233 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2234
2235 x = expand_normal (src);
2236
2237 bytes = int_size_in_bytes (TREE_TYPE (src));
2238 if (bytes == 0)
2239 return NULL_RTX;
2240
2241 /* If the structure doesn't take up a whole number of words, see
2242 whether the register value should be padded on the left or on
2243 the right. Set PADDING_CORRECTION to the number of padding
2244 bits needed on the left side.
2245
2246 In most ABIs, the structure will be returned at the least significant end of
2247 the register, which translates to right padding on little-endian
2248 targets and left padding on big-endian targets. The opposite
2249 holds if the structure is returned at the most significant
2250 end of the register. */
2251 if (bytes % UNITS_PER_WORD != 0
2252 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2253 ? !BYTES_BIG_ENDIAN
2254 : BYTES_BIG_ENDIAN))
2255 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2256 * BITS_PER_UNIT));
2257
2258 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2259 dst_words = XALLOCAVEC (rtx, n_regs);
2260 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2261
2262 /* Copy the structure BITSIZE bits at a time. */
2263 for (bitpos = 0, xbitpos = padding_correction;
2264 bitpos < bytes * BITS_PER_UNIT;
2265 bitpos += bitsize, xbitpos += bitsize)
2266 {
2267 /* We need a new destination pseudo each time xbitpos is
2268 on a word boundary and when xbitpos == padding_correction
2269 (the first time through). */
2270 if (xbitpos % BITS_PER_WORD == 0
2271 || xbitpos == padding_correction)
2272 {
2273 /* Generate an appropriate register. */
2274 dst_word = gen_reg_rtx (word_mode);
2275 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2276
2277 /* Clear the destination before we move anything into it. */
2278 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2279 }
2280
2281 /* We need a new source operand each time bitpos is on a word
2282 boundary. */
2283 if (bitpos % BITS_PER_WORD == 0)
2284 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2285
2286 /* Use bitpos for the source extraction (left justified) and
2287 xbitpos for the destination store (right justified). */
2288 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2289 0, 0, word_mode,
2290 extract_bit_field (src_word, bitsize,
3f71db40 2291 bitpos % BITS_PER_WORD, 1,
ee5ab2d1 2292 NULL_RTX, word_mode, word_mode));
2293 }
2294
2295 if (mode == BLKmode)
2296 {
2297 /* Find the smallest integer mode large enough to hold the
2298 entire structure. */
2299 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 mode != VOIDmode;
2301 mode = GET_MODE_WIDER_MODE (mode))
2302 /* Have we found a large enough mode? */
2303 if (GET_MODE_SIZE (mode) >= bytes)
2304 break;
2305
2306 /* A suitable mode should have been found. */
2307 gcc_assert (mode != VOIDmode);
2308 }
2309
2310 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2311 dst_mode = word_mode;
2312 else
2313 dst_mode = mode;
2314 dst = gen_reg_rtx (dst_mode);
2315
2316 for (i = 0; i < n_regs; i++)
2317 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2318
2319 if (mode != dst_mode)
2320 dst = gen_lowpart (mode, dst);
2321
2322 return dst;
2323}
2324
07409b3a 2325/* Add a USE expression for REG to the (possibly empty) list pointed
2326 to by CALL_FUSAGE. REG must denote a hard register. */
10f307d9 2327
2328void
3754d046 2329use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
7e2ca70b 2330{
b69de85a 2331 gcc_assert (REG_P (reg));
2332
2333 if (!HARD_REGISTER_P (reg))
2334 return;
1f8b6002 2335
7e2ca70b 2336 *call_fusage
b4eeceb9 2337 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
7e2ca70b 2338}
2339
e67cfba4 2340/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2341 to by CALL_FUSAGE. REG must denote a hard register. */
2342
2343void
3754d046 2344clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
e67cfba4 2345{
2346 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2347
2348 *call_fusage
2349 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2350}
2351
07409b3a 2352/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2353 starting at REGNO. All of these registers must be hard registers. */
7e2ca70b 2354
2355void
35cb5232 2356use_regs (rtx *call_fusage, int regno, int nregs)
10f307d9 2357{
f2799de7 2358 int i;
10f307d9 2359
611234b4 2360 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
f2799de7 2361
2362 for (i = 0; i < nregs; i++)
936082bb 2363 use_reg (call_fusage, regno_reg_rtx[regno + i]);
10f307d9 2364}
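/* Editorial sketch (not part of expr.c): building a CALL_FUSAGE list so that
   dataflow knows which hard registers a (lib)call implicitly reads.  The
   wrapper and BASE_REGNO are invented.  */
#if 0
static rtx
example_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, BASE_REGNO, 2);                /* BASE_REGNO, +1 */
  use_reg (&call_fusage, gen_rtx_REG (Pmode, BASE_REGNO + 2));
  /* The list is later attached to the emitted CALL_INSN.  */
  return call_fusage;
}
#endif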
ce739127 2365
2366/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2367 PARALLEL REGS. This is for calls that pass values in multiple
2368 non-contiguous locations. The Irix 6 ABI has examples of this. */
2369
2370void
35cb5232 2371use_group_regs (rtx *call_fusage, rtx regs)
ce739127 2372{
2373 int i;
2374
2f373e5d 2375 for (i = 0; i < XVECLEN (regs, 0); i++)
2376 {
2377 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
ce739127 2378
2f373e5d 2379 /* A NULL entry means the parameter goes both on the stack and in
2380 registers. This can also be a MEM for targets that pass values
2381 partially on the stack and partially in registers. */
8ad4c111 2382 if (reg != 0 && REG_P (reg))
2f373e5d 2383 use_reg (call_fusage, reg);
2384 }
ce739127 2385}
c1a83279 2386
2387/* Return the defining gimple statement for SSA_NAME NAME if it is an
2388 assignment and the code of the expression on the RHS is CODE. Return
2389 NULL otherwise. */
2390
2391static gimple
2392get_def_for_expr (tree name, enum tree_code code)
2393{
2394 gimple def_stmt;
2395
2396 if (TREE_CODE (name) != SSA_NAME)
2397 return NULL;
2398
2399 def_stmt = get_gimple_for_ssa_name (name);
2400 if (!def_stmt
2401 || gimple_assign_rhs_code (def_stmt) != code)
2402 return NULL;
2403
2404 return def_stmt;
2405}
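/* Editorial sketch (not part of expr.c): the usual pattern for the helper
   above, here spotting a feeding multiplication while expanding an
   addition (e.g. to form a multiply-add).  The wrapper and operand names
   are illustrative.  */
#if 0
static void
example_find_feeding_mult (tree treeop0)
{
  gimple def = get_def_for_expr (treeop0, MULT_EXPR);
  if (def)
    {
      tree mul_op0 = gimple_assign_rhs1 (def);
      tree mul_op1 = gimple_assign_rhs2 (def);
      /* ... expand mul_op0 * mul_op1 combined with the other addend ...  */
    }
}
#endif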
c909ed33 2406
2407/* Return the defining gimple statement for SSA_NAME NAME if it is an
2408 assignment and the class of the expression on the RHS is CLASS. Return
2409 NULL otherwise. */
2410
2411static gimple
2412get_def_for_expr_class (tree name, enum tree_code_class tclass)
2413{
2414 gimple def_stmt;
2415
2416 if (TREE_CODE (name) != SSA_NAME)
2417 return NULL;
2418
2419 def_stmt = get_gimple_for_ssa_name (name);
2420 if (!def_stmt
2421 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2422 return NULL;
2423
2424 return def_stmt;
2425}
10f307d9 2426\f
6840589f 2427
d1f6ae0c 2428/* Determine whether the LEN bytes generated by CONSTFUN can be
2429 stored to memory using several move instructions. CONSTFUNDATA is
2430 a pointer which will be passed as argument in every CONSTFUN call.
4b297e2e 2431 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2432 a memset operation and false if it's a copy of a constant string.
2433 Return nonzero if a call to store_by_pieces should succeed. */
d1f6ae0c 2434
6840589f 2435int
35cb5232 2436can_store_by_pieces (unsigned HOST_WIDE_INT len,
3754d046 2437 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
4b297e2e 2438 void *constfundata, unsigned int align, bool memsetp)
6840589f 2439{
025d4f81 2440 unsigned HOST_WIDE_INT l;
2441 unsigned int max_size;
6840589f 2442 HOST_WIDE_INT offset = 0;
3754d046 2443 machine_mode mode;
6840589f 2444 enum insn_code icode;
2445 int reverse;
d92517d3 2446 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2447 rtx cst ATTRIBUTE_UNUSED;
6840589f 2448
1d881c02 2449 if (len == 0)
2450 return 1;
2451
d4bd0e64 2452 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2453 memsetp
2454 ? SET_BY_PIECES
2455 : STORE_BY_PIECES,
2456 optimize_insn_for_speed_p ()))
6840589f 2457 return 0;
2458
c7e41aee 2459 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
6840589f 2460
2461 /* We would first store what we can in the largest integer mode, then go to
2462 successively smaller modes. */
2463
2464 for (reverse = 0;
2465 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2466 reverse++)
2467 {
2468 l = len;
d1f6ae0c 2469 max_size = STORE_MAX_PIECES + 1;
01dd0067 2470 while (max_size > 1 && l > 0)
6840589f 2471 {
c7e41aee 2472 mode = widest_int_mode_for_size (max_size);
6840589f 2473
2474 if (mode == VOIDmode)
2475 break;
2476
d6bf3b14 2477 icode = optab_handler (mov_optab, mode);
6840589f 2478 if (icode != CODE_FOR_nothing
2479 && align >= GET_MODE_ALIGNMENT (mode))
2480 {
2481 unsigned int size = GET_MODE_SIZE (mode);
2482
2483 while (l >= size)
2484 {
2485 if (reverse)
2486 offset -= size;
2487
2488 cst = (*constfun) (constfundata, offset, mode);
ca316360 2489 if (!targetm.legitimate_constant_p (mode, cst))
6840589f 2490 return 0;
2491
2492 if (!reverse)
2493 offset += size;
2494
2495 l -= size;
2496 }
2497 }
2498
2499 max_size = GET_MODE_SIZE (mode);
2500 }
2501
2502 /* The code above should have handled everything. */
611234b4 2503 gcc_assert (!l);
6840589f 2504 }
2505
2506 return 1;
2507}
2508
2509/* Generate several move instructions to store LEN bytes generated by
2510 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2511 pointer which will be passed as argument in every CONSTFUN call.
4b297e2e 2512 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2513 a memset operation and false if it's a copy of a constant string.
9fe0e1b8 2514 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2515 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2516 stpcpy. */
6840589f 2517
9fe0e1b8 2518rtx
35cb5232 2519store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
3754d046 2520 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
4b297e2e 2521 void *constfundata, unsigned int align, bool memsetp, int endp)
6840589f 2522{
3754d046 2523 machine_mode to_addr_mode = get_address_mode (to);
584511c1 2524 struct store_by_pieces_d data;
6840589f 2525
1d881c02 2526 if (len == 0)
2527 {
611234b4 2528 gcc_assert (endp != 2);
1d881c02 2529 return to;
2530 }
2531
d4bd0e64 2532 gcc_assert (targetm.use_by_pieces_infrastructure_p
2533 (len, align,
2534 memsetp
2535 ? SET_BY_PIECES
2536 : STORE_BY_PIECES,
2537 optimize_insn_for_speed_p ()));
2538
6840589f 2539 data.constfun = constfun;
2540 data.constfundata = constfundata;
2541 data.len = len;
2542 data.to = to;
2543 store_by_pieces_1 (&data, align);
9fe0e1b8 2544 if (endp)
2545 {
2546 rtx to1;
2547
611234b4 2548 gcc_assert (!data.reverse);
9fe0e1b8 2549 if (data.autinc_to)
2550 {
2551 if (endp == 2)
2552 {
2553 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2554 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2555 else
98155838 2556 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 2557 plus_constant (to_addr_mode,
2558 data.to_addr,
9fe0e1b8 2559 -1));
2560 }
2561 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2562 data.offset);
2563 }
2564 else
2565 {
2566 if (endp == 2)
2567 --data.offset;
2568 to1 = adjust_address (data.to, QImode, data.offset);
2569 }
2570 return to1;
2571 }
2572 else
2573 return data.to;
6840589f 2574}
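/* Editorial sketch (not part of expr.c): a constant-generator callback plus
   the usual check-then-emit pattern for the two routines above.  Both
   functions here are invented; the string/memset expanders in builtins.c
   follow the same shape.  */
#if 0
static rtx
example_zero_gen (void *data ATTRIBUTE_UNUSED,
		  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		  machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_clear_block (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_gen, NULL, align, true))
    store_by_pieces (to, len, example_zero_gen, NULL, align, true, 0);
}
#endif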
2575
325d1c45 2576/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
0a534ba7 2577 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
dbd14dc5 2578
2579static void
f1667d92 2580clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
dbd14dc5 2581{
584511c1 2582 struct store_by_pieces_d data;
6840589f 2583
1d881c02 2584 if (len == 0)
2585 return;
2586
6840589f 2587 data.constfun = clear_by_pieces_1;
2571646d 2588 data.constfundata = NULL;
6840589f 2589 data.len = len;
2590 data.to = to;
2591 store_by_pieces_1 (&data, align);
2592}
2593
2594/* Callback routine for clear_by_pieces.
2595 Return const0_rtx unconditionally. */
2596
2597static rtx
35cb5232 2598clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2599 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 2600 machine_mode mode ATTRIBUTE_UNUSED)
6840589f 2601{
2602 return const0_rtx;
2603}
2604
2605/* Subroutine of clear_by_pieces and store_by_pieces.
2606 Generate several move instructions to store LEN bytes of block TO. (A MEM
0a534ba7 2607 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
6840589f 2608
2609static void
584511c1 2610store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
35cb5232 2611 unsigned int align ATTRIBUTE_UNUSED)
6840589f 2612{
3754d046 2613 machine_mode to_addr_mode = get_address_mode (data->to);
6840589f 2614 rtx to_addr = XEXP (data->to, 0);
025d4f81 2615 unsigned int max_size = STORE_MAX_PIECES + 1;
53bd09ab 2616 enum insn_code icode;
dbd14dc5 2617
6840589f 2618 data->offset = 0;
2619 data->to_addr = to_addr;
2620 data->autinc_to
dbd14dc5 2621 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2622 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2623
6840589f 2624 data->explicit_inc_to = 0;
2625 data->reverse
dbd14dc5 2626 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
6840589f 2627 if (data->reverse)
2628 data->offset = data->len;
dbd14dc5 2629
6840589f 2630 /* If storing requires more than two move insns,
dbd14dc5 2631 copy addresses to registers (to make displacements shorter)
2632 and use post-increment if available. */
6840589f 2633 if (!data->autinc_to
025d4f81 2634 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
dbd14dc5 2635 {
c7e41aee 2636 /* Determine the main mode we'll be using.
2637 MODE might not be used depending on the definitions of the
2638 USE_* macros below. */
3754d046 2639 machine_mode mode ATTRIBUTE_UNUSED
c7e41aee 2640 = widest_int_mode_for_size (max_size);
53bd09ab 2641
6840589f 2642 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
dbd14dc5 2643 {
98155838 2644 data->to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 2645 plus_constant (to_addr_mode,
2646 to_addr,
2647 data->len));
6840589f 2648 data->autinc_to = 1;
2649 data->explicit_inc_to = -1;
dbd14dc5 2650 }
f7c44134 2651
6840589f 2652 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2653 && ! data->autinc_to)
dbd14dc5 2654 {
98155838 2655 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
6840589f 2656 data->autinc_to = 1;
2657 data->explicit_inc_to = 1;
dbd14dc5 2658 }
f7c44134 2659
6840589f 2660 if ( !data->autinc_to && CONSTANT_P (to_addr))
98155838 2661 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
dbd14dc5 2662 }
2663
c7e41aee 2664 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
dbd14dc5 2665
6840589f 2666 /* First store what we can in the largest integer mode, then go to
dbd14dc5 2667 successively smaller modes. */
2668
01dd0067 2669 while (max_size > 1 && data->len > 0)
dbd14dc5 2670 {
3754d046 2671 machine_mode mode = widest_int_mode_for_size (max_size);
dbd14dc5 2672
2673 if (mode == VOIDmode)
2674 break;
2675
d6bf3b14 2676 icode = optab_handler (mov_optab, mode);
325d1c45 2677 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
6840589f 2678 store_by_pieces_2 (GEN_FCN (icode), mode, data);
dbd14dc5 2679
2680 max_size = GET_MODE_SIZE (mode);
2681 }
2682
2683 /* The code above should have handled everything. */
611234b4 2684 gcc_assert (!data->len);
dbd14dc5 2685}
2686
6840589f 2687/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
dbd14dc5 2688 with move instructions for mode MODE. GENFUN is the gen_... function
2689 to make a move insn for that mode. DATA has all the other info. */
2690
2691static void
3d953cb1 2692store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
584511c1 2693 struct store_by_pieces_d *data)
dbd14dc5 2694{
f7c44134 2695 unsigned int size = GET_MODE_SIZE (mode);
6840589f 2696 rtx to1, cst;
dbd14dc5 2697
2698 while (data->len >= size)
2699 {
f7c44134 2700 if (data->reverse)
2701 data->offset -= size;
dbd14dc5 2702
f7c44134 2703 if (data->autinc_to)
bf42c62d 2704 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2705 data->offset);
fa56dc1d 2706 else
e513d163 2707 to1 = adjust_address (data->to, mode, data->offset);
dbd14dc5 2708
e4e498cf 2709 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
6840589f 2710 emit_insn (gen_add2_insn (data->to_addr,
d11aedc7 2711 gen_int_mode (-(HOST_WIDE_INT) size,
2712 GET_MODE (data->to_addr))));
dbd14dc5 2713
6840589f 2714 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2715 emit_insn ((*genfun) (to1, cst));
f7c44134 2716
e4e498cf 2717 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
d11aedc7 2718 emit_insn (gen_add2_insn (data->to_addr,
2719 gen_int_mode (size,
2720 GET_MODE (data->to_addr))));
dbd14dc5 2721
f7c44134 2722 if (! data->reverse)
2723 data->offset += size;
dbd14dc5 2724
2725 data->len -= size;
2726 }
2727}
2728\f
325d1c45 2729/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2a631e19 2730 its length in bytes. */
0dbd1c74 2731
2732rtx
162719b3 2733clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
36d63243 2734 unsigned int expected_align, HOST_WIDE_INT expected_size,
2735 unsigned HOST_WIDE_INT min_size,
9db0f34d 2736 unsigned HOST_WIDE_INT max_size,
2737 unsigned HOST_WIDE_INT probable_max_size)
10f307d9 2738{
3754d046 2739 machine_mode mode = GET_MODE (object);
83016f38 2740 unsigned int align;
0dbd1c74 2741
0b25db21 2742 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2743
20c377c2 2744 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2745 just move a zero. Otherwise, do this a piece at a time. */
83016f38 2746 if (mode != BLKmode
971ba038 2747 && CONST_INT_P (size)
83016f38 2748 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
10f307d9 2749 {
83016f38 2750 rtx zero = CONST0_RTX (mode);
2751 if (zero != NULL)
2752 {
2753 emit_move_insn (object, zero);
2754 return NULL;
2755 }
2756
2757 if (COMPLEX_MODE_P (mode))
2758 {
2759 zero = CONST0_RTX (GET_MODE_INNER (mode));
2760 if (zero != NULL)
2761 {
2762 write_complex_part (object, zero, 0);
2763 write_complex_part (object, zero, 1);
2764 return NULL;
2765 }
2766 }
c0bfc78e 2767 }
2768
83016f38 2769 if (size == const0_rtx)
2770 return NULL;
2771
2772 align = MEM_ALIGN (object);
2773
971ba038 2774 if (CONST_INT_P (size)
d4bd0e64 2775 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2776 CLEAR_BY_PIECES,
2777 optimize_insn_for_speed_p ()))
83016f38 2778 clear_by_pieces (object, INTVAL (size), align);
162719b3 2779 else if (set_storage_via_setmem (object, size, const0_rtx, align,
36d63243 2780 expected_align, expected_size,
9db0f34d 2781 min_size, max_size, probable_max_size))
83016f38 2782 ;
bd1a81f7 2783 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
ab608690 2784 return set_storage_via_libcall (object, size, const0_rtx,
2785 method == BLOCK_OP_TAILCALL);
bd1a81f7 2786 else
2787 gcc_unreachable ();
83016f38 2788
2789 return NULL;
c0bfc78e 2790}
2791
162719b3 2792rtx
2793clear_storage (rtx object, rtx size, enum block_op_methods method)
2794{
36d63243 2795 unsigned HOST_WIDE_INT max, min = 0;
2796 if (GET_CODE (size) == CONST_INT)
2797 min = max = UINTVAL (size);
2798 else
2799 max = GET_MODE_MASK (GET_MODE (size));
9db0f34d 2800 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
162719b3 2801}
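/* Editorial sketch (not part of expr.c): the common way to zero an
   aggregate, e.g. when expanding an empty CONSTRUCTOR.  The wrapper is
   invented; DEST is a MEM and LEN its size in bytes.  */
#if 0
static void
example_zero_aggregate (rtx dest, HOST_WIDE_INT len)
{
  clear_storage (dest, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif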
2802
2803
f896c932 2804/* A subroutine of clear_storage. Expand a call to memset.
c0bfc78e 2805 Return the return value of memset, 0 otherwise. */
dbd14dc5 2806
ab608690 2807rtx
2808set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
c0bfc78e 2809{
c2f47e15 2810 tree call_expr, fn, object_tree, size_tree, val_tree;
3754d046 2811 machine_mode size_mode;
c0bfc78e 2812 rtx retval;
dbd14dc5 2813
0a534ba7 2814 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2815 place those new pseudos into a VAR_DECL and use them later. */
f708f8fd 2816
99182918 2817 object = copy_addr_to_reg (XEXP (object, 0));
f708f8fd 2818
f896c932 2819 size_mode = TYPE_MODE (sizetype);
c0bfc78e 2820 size = convert_to_mode (size_mode, size, 1);
2821 size = copy_to_mode_reg (size_mode, size);
f708f8fd 2822
c0bfc78e 2823 /* It is incorrect to use the libcall calling conventions to call
2824 memset in this context. This could be a user call to memset and
2825 the user may wish to examine the return value from memset. For
2826 targets where libcalls and normal calls have different conventions
f896c932 2827 for returning pointers, we could end up generating incorrect code. */
06b8e3db 2828
c0bfc78e 2829 object_tree = make_tree (ptr_type_node, object);
971ba038 2830 if (!CONST_INT_P (val))
ab608690 2831 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
f896c932 2832 size_tree = make_tree (sizetype, size);
ab608690 2833 val_tree = make_tree (integer_type_node, val);
c0bfc78e 2834
2835 fn = clear_storage_libcall_fn (true);
d52d7a3a 2836 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
0b25db21 2837 CALL_EXPR_TAILCALL (call_expr) = tailcall;
c0bfc78e 2838
8ec3c5c2 2839 retval = expand_normal (call_expr);
c0bfc78e 2840
f896c932 2841 return retval;
c0bfc78e 2842}
2843
ab608690 2844/* A subroutine of set_storage_via_libcall. Create the tree node
ea259bbe 2845 for the function we use for block clears. */
c0bfc78e 2846
aa140b76 2847tree block_clear_fn;
8ca560c1 2848
d459e0d8 2849void
35cb5232 2850init_block_clear_fn (const char *asmspec)
c0bfc78e 2851{
d459e0d8 2852 if (!block_clear_fn)
c0bfc78e 2853 {
d459e0d8 2854 tree fn, args;
2855
f896c932 2856 fn = get_identifier ("memset");
2857 args = build_function_type_list (ptr_type_node, ptr_type_node,
2858 integer_type_node, sizetype,
2859 NULL_TREE);
c0bfc78e 2860
e60a6f7b 2861 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
c0bfc78e 2862 DECL_EXTERNAL (fn) = 1;
2863 TREE_PUBLIC (fn) = 1;
2864 DECL_ARTIFICIAL (fn) = 1;
2865 TREE_NOTHROW (fn) = 1;
f0f2eb24 2866 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2867 DECL_VISIBILITY_SPECIFIED (fn) = 1;
c0bfc78e 2868
2869 block_clear_fn = fn;
10f307d9 2870 }
0dbd1c74 2871
d459e0d8 2872 if (asmspec)
b2c4af5e 2873 set_user_assembler_name (block_clear_fn, asmspec);
d459e0d8 2874}
2875
2876static tree
35cb5232 2877clear_storage_libcall_fn (int for_call)
d459e0d8 2878{
2879 static bool emitted_extern;
2880
2881 if (!block_clear_fn)
2882 init_block_clear_fn (NULL);
2883
c0bfc78e 2884 if (for_call && !emitted_extern)
2885 {
2886 emitted_extern = true;
b2c4af5e 2887 make_decl_rtl (block_clear_fn);
c0bfc78e 2888 }
10f307d9 2889
d459e0d8 2890 return block_clear_fn;
c0bfc78e 2891}
7a3e5564 2892\f
2893/* Expand a setmem pattern; return true if successful. */
2894
2895bool
162719b3 2896set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
36d63243 2897 unsigned int expected_align, HOST_WIDE_INT expected_size,
2898 unsigned HOST_WIDE_INT min_size,
9db0f34d 2899 unsigned HOST_WIDE_INT max_size,
2900 unsigned HOST_WIDE_INT probable_max_size)
7a3e5564 2901{
2902 /* Try the most limited insn first, because there's no point
2903 including more than one in the machine description unless
2904 the more limited one has some advantage. */
2905
3754d046 2906 machine_mode mode;
7a3e5564 2907
162719b3 2908 if (expected_align < align)
2909 expected_align = align;
36d63243 2910 if (expected_size != -1)
2911 {
2912 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2913 expected_size = max_size;
2914 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2915 expected_size = min_size;
2916 }
162719b3 2917
7a3e5564 2918 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2919 mode = GET_MODE_WIDER_MODE (mode))
2920 {
6b531606 2921 enum insn_code code = direct_optab_handler (setmem_optab, mode);
7a3e5564 2922
2923 if (code != CODE_FOR_nothing
300c6cee 2924 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2925 here because if SIZE is less than the mode mask, as it is
2926 returned by the macro, it will definitely be less than the
2927 actual mode mask. Since SIZE is within the Pmode address
2928 space, we limit MODE to Pmode. */
971ba038 2929 && ((CONST_INT_P (size)
7a3e5564 2930 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2931 <= (GET_MODE_MASK (mode) >> 1)))
36d63243 2932 || max_size <= (GET_MODE_MASK (mode) >> 1)
300c6cee 2933 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
8786db1e 2934 {
9db0f34d 2935 struct expand_operand ops[9];
8786db1e 2936 unsigned int nops;
2937
32f79657 2938 nops = insn_data[(int) code].n_generator_args;
9db0f34d 2939 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
b52cb719 2940
8786db1e 2941 create_fixed_operand (&ops[0], object);
2942 /* The check above guarantees that this size conversion is valid. */
2943 create_convert_operand_to (&ops[1], size, mode, true);
2944 create_convert_operand_from (&ops[2], val, byte_mode, true);
2945 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
36d63243 2946 if (nops >= 6)
cc0dc0e0 2947 {
8786db1e 2948 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2949 create_integer_operand (&ops[5], expected_size);
cc0dc0e0 2950 }
9db0f34d 2951 if (nops >= 8)
36d63243 2952 {
2953 create_integer_operand (&ops[6], min_size);
2954 /* If we cannot represent the maximal size,
2955 make parameter NULL. */
2956 if ((HOST_WIDE_INT) max_size != -1)
2957 create_integer_operand (&ops[7], max_size);
2958 else
2959 create_fixed_operand (&ops[7], NULL);
2960 }
9db0f34d 2961 if (nops == 9)
2962 {
2963 /* If we can not represent the maximal size,
2964 make parameter NULL. */
2965 if ((HOST_WIDE_INT) probable_max_size != -1)
2966 create_integer_operand (&ops[8], probable_max_size);
2967 else
2968 create_fixed_operand (&ops[8], NULL);
2969 }
8786db1e 2970 if (maybe_expand_insn (code, nops, ops))
2971 return true;
7a3e5564 2972 }
2973 }
2974
2975 return false;
2976}
2977
c0bfc78e 2978\f
de17a47b 2979/* Write to one of the components of the complex value CPLX. Write VAL to
2980 the real part if IMAG_P is false, and the imaginary part if its true. */
10f307d9 2981
0c93c8a9 2982void
de17a47b 2983write_complex_part (rtx cplx, rtx val, bool imag_p)
2984{
3754d046 2985 machine_mode cmode;
2986 machine_mode imode;
a3b104d2 2987 unsigned ibitsize;
2988
de17a47b 2989 if (GET_CODE (cplx) == CONCAT)
de17a47b 2990 {
a3b104d2 2991 emit_move_insn (XEXP (cplx, imag_p), val);
2992 return;
2993 }
2994
2995 cmode = GET_MODE (cplx);
2996 imode = GET_MODE_INNER (cmode);
2997 ibitsize = GET_MODE_BITSIZE (imode);
10f307d9 2998
ba881251 2999 /* For MEMs simplify_gen_subreg may generate an invalid new address
3000 because, e.g., the original address is considered mode-dependent
3001 by the target, which restricts simplify_subreg from invoking
3002 adjust_address_nv. Instead of preparing fallback support for an
3003 invalid address, we call adjust_address_nv directly. */
3004 if (MEM_P (cplx))
69edf651 3005 {
3006 emit_move_insn (adjust_address_nv (cplx, imode,
3007 imag_p ? GET_MODE_SIZE (imode) : 0),
3008 val);
3009 return;
3010 }
ba881251 3011
a3b104d2 3012 /* If the sub-object is at least word sized, then we know that subregging
3013 will work. This special case is important, since store_bit_field
3014 wants to operate on integer modes, and there's rarely an OImode to
3015 correspond to TCmode. */
ccd5a3ef 3016 if (ibitsize >= BITS_PER_WORD
3017 /* For hard regs we have exact predicates. Assume we can split
3018 the original object if it spans an even number of hard regs.
3019 This special case is important for SCmode on 64-bit platforms
3020 where the natural size of floating-point regs is 32-bit. */
1c14a50e 3021 || (REG_P (cplx)
ccd5a3ef 3022 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
0933f1d9 3023 && REG_NREGS (cplx) % 2 == 0))
a3b104d2 3024 {
3025 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3026 imag_p ? GET_MODE_SIZE (imode) : 0);
ccd5a3ef 3027 if (part)
3028 {
3029 emit_move_insn (part, val);
3030 return;
3031 }
3032 else
3033 /* simplify_gen_subreg may fail for sub-word MEMs. */
3034 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
de17a47b 3035 }
ccd5a3ef 3036
4bb60ec7 3037 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
de17a47b 3038}
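/* Editorial sketch (not part of expr.c): assembling a complex value from
   separately computed parts.  TARGET must already have a complex mode;
   the wrapper and names are invented.  */
#if 0
static void
example_build_complex (rtx target, rtx re, rtx im)
{
  write_complex_part (target, re, false);   /* real part       */
  write_complex_part (target, im, true);    /* imaginary part  */
}
#endif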
3039
3040/* Extract one of the components of the complex value CPLX. Extract the
3041 real part if IMAG_P is false, and the imaginary part if it's true. */
3042
3043static rtx
3044read_complex_part (rtx cplx, bool imag_p)
10f307d9 3045{
3754d046 3046 machine_mode cmode, imode;
de17a47b 3047 unsigned ibitsize;
10f307d9 3048
de17a47b 3049 if (GET_CODE (cplx) == CONCAT)
3050 return XEXP (cplx, imag_p);
10f307d9 3051
de17a47b 3052 cmode = GET_MODE (cplx);
3053 imode = GET_MODE_INNER (cmode);
3054 ibitsize = GET_MODE_BITSIZE (imode);
3055
3056 /* Special case reads from complex constants that got spilled to memory. */
3057 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
94580317 3058 {
de17a47b 3059 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3060 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3061 {
3062 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3063 if (CONSTANT_CLASS_P (part))
3064 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3065 }
3066 }
c0c4a46d 3067
ba881251 3068 /* For MEMs simplify_gen_subreg may generate an invalid new address
3069 because, e.g., the original address is considered mode-dependent
3070 by the target, which restricts simplify_subreg from invoking
3071 adjust_address_nv. Instead of preparing fallback support for an
3072 invalid address, we call adjust_address_nv directly. */
3073 if (MEM_P (cplx))
3074 return adjust_address_nv (cplx, imode,
3075 imag_p ? GET_MODE_SIZE (imode) : 0);
3076
a3b104d2 3077 /* If the sub-object is at least word sized, then we know that subregging
3078 will work. This special case is important, since extract_bit_field
3079 wants to operate on integer modes, and there's rarely an OImode to
3080 correspond to TCmode. */
ccd5a3ef 3081 if (ibitsize >= BITS_PER_WORD
3082 /* For hard regs we have exact predicates. Assume we can split
3083 the original object if it spans an even number of hard regs.
3084 This special case is important for SCmode on 64-bit platforms
3085 where the natural size of floating-point regs is 32-bit. */
1c14a50e 3086 || (REG_P (cplx)
ccd5a3ef 3087 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
0933f1d9 3088 && REG_NREGS (cplx) % 2 == 0))
a3b104d2 3089 {
3090 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3091 imag_p ? GET_MODE_SIZE (imode) : 0);
ccd5a3ef 3092 if (ret)
3093 return ret;
3094 else
3095 /* simplify_gen_subreg may fail for sub-word MEMs. */
3096 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
a3b104d2 3097 }
3098
de17a47b 3099 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3f71db40 3100 true, NULL_RTX, imode, imode);
de17a47b 3101}
3102\f
f2ed60da 3103/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
df297520 3104 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
f2ed60da 3105 represented in NEW_MODE. If FORCE is true, this will never happen, as
3106 we'll force-create a SUBREG if needed. */
6442675c 3107
de17a47b 3108static rtx
3754d046 3109emit_move_change_mode (machine_mode new_mode,
3110 machine_mode old_mode, rtx x, bool force)
de17a47b 3111{
df297520 3112 rtx ret;
de17a47b 3113
2749a22e 3114 if (push_operand (x, GET_MODE (x)))
3115 {
3116 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3117 MEM_COPY_ATTRIBUTES (ret, x);
3118 }
3119 else if (MEM_P (x))
de17a47b 3120 {
d2121072 3121 /* We don't have to worry about changing the address since the
3122 size in bytes is supposed to be the same. */
3123 if (reload_in_progress)
3124 {
3125 /* Copy the MEM to change the mode and move any
3126 substitutions from the old MEM to the new one. */
3127 ret = adjust_address_nv (x, new_mode, 0);
3128 copy_replacements (x, ret);
3129 }
3130 else
3131 ret = adjust_address (x, new_mode, 0);
94580317 3132 }
de17a47b 3133 else
3134 {
0975351b 3135 /* Note that we do want simplify_subreg's behavior of validating
df297520 3136 that the new mode is ok for a hard register. If we were to use
3137 simplify_gen_subreg, we would create the subreg, but would
3138 probably run into the target not being able to implement it. */
f2ed60da 3139 /* Except, of course, when FORCE is true, when this is exactly what
3140 we want. Which is needed for CCmodes on some targets. */
3141 if (force)
3142 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3143 else
3144 ret = simplify_subreg (new_mode, x, old_mode, 0);
de17a47b 3145 }
10f307d9 3146
df297520 3147 return ret;
3148}
3149
de17a47b 3150/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3151 an integer mode of the same size as MODE. Returns the instruction
3152 emitted, or NULL if such a move could not be generated. */
10f307d9 3153
c81fd430 3154static rtx_insn *
3754d046 3155emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
de17a47b 3156{
3754d046 3157 machine_mode imode;
de17a47b 3158 enum insn_code code;
10f307d9 3159
de17a47b 3160 /* There must exist a mode of the exact size we require. */
3161 imode = int_mode_for_mode (mode);
3162 if (imode == BLKmode)
c81fd430 3163 return NULL;
94580317 3164
de17a47b 3165 /* The target must support moves in this mode. */
d6bf3b14 3166 code = optab_handler (mov_optab, imode);
de17a47b 3167 if (code == CODE_FOR_nothing)
c81fd430 3168 return NULL;
94580317 3169
80e467e2 3170 x = emit_move_change_mode (imode, mode, x, force);
f2ed60da 3171 if (x == NULL_RTX)
c81fd430 3172 return NULL;
80e467e2 3173 y = emit_move_change_mode (imode, mode, y, force);
f2ed60da 3174 if (y == NULL_RTX)
c81fd430 3175 return NULL;
f2ed60da 3176 return emit_insn (GEN_FCN (code) (x, y));
aaad03e5 3177}
3178
de17a47b 3179/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3180 Return an equivalent MEM that does not use an auto-increment. */
aaad03e5 3181
dc7cdd37 3182rtx
3754d046 3183emit_move_resolve_push (machine_mode mode, rtx x)
aaad03e5 3184{
de17a47b 3185 enum rtx_code code = GET_CODE (XEXP (x, 0));
3186 HOST_WIDE_INT adjust;
3187 rtx temp;
aaad03e5 3188
de17a47b 3189 adjust = GET_MODE_SIZE (mode);
3190#ifdef PUSH_ROUNDING
3191 adjust = PUSH_ROUNDING (adjust);
3192#endif
3193 if (code == PRE_DEC || code == POST_DEC)
3194 adjust = -adjust;
3cb7a129 3195 else if (code == PRE_MODIFY || code == POST_MODIFY)
3196 {
3197 rtx expr = XEXP (XEXP (x, 0), 1);
3198 HOST_WIDE_INT val;
3199
3200 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
971ba038 3201 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3cb7a129 3202 val = INTVAL (XEXP (expr, 1));
3203 if (GET_CODE (expr) == MINUS)
3204 val = -val;
3205 gcc_assert (adjust == val || adjust == -val);
3206 adjust = val;
3207 }
1203f673 3208
de17a47b 3209 /* Do not use anti_adjust_stack, since we don't want to update
3210 stack_pointer_delta. */
3211 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
0359f9f5 3212 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
de17a47b 3213 0, OPTAB_LIB_WIDEN);
3214 if (temp != stack_pointer_rtx)
3215 emit_move_insn (stack_pointer_rtx, temp);
10f307d9 3216
de17a47b 3217 switch (code)
b63679d2 3218 {
de17a47b 3219 case PRE_INC:
3220 case PRE_DEC:
3cb7a129 3221 case PRE_MODIFY:
de17a47b 3222 temp = stack_pointer_rtx;
3223 break;
3224 case POST_INC:
de17a47b 3225 case POST_DEC:
3cb7a129 3226 case POST_MODIFY:
29c05e22 3227 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
de17a47b 3228 break;
3229 default:
3230 gcc_unreachable ();
3231 }
b63679d2 3232
de17a47b 3233 return replace_equiv_address (x, temp);
3234}
a8d8b962 3235
de17a47b 3236/* A subroutine of emit_move_complex. Generate a move from Y into X.
3237 X is known to satisfy push_operand, and MODE is known to be complex.
3238 Returns the last instruction emitted. */
76ab50f8 3239
c81fd430 3240rtx_insn *
3754d046 3241emit_move_complex_push (machine_mode mode, rtx x, rtx y)
de17a47b 3242{
3754d046 3243 machine_mode submode = GET_MODE_INNER (mode);
de17a47b 3244 bool imag_first;
76ab50f8 3245
de17a47b 3246#ifdef PUSH_ROUNDING
3247 unsigned int submodesize = GET_MODE_SIZE (submode);
76ab50f8 3248
de17a47b 3249 /* If we are pushing to the stack, but the size is smaller than what the
3250 machine can push exactly, we need to use move instructions. */
3251 if (PUSH_ROUNDING (submodesize) != submodesize)
3252 {
3253 x = emit_move_resolve_push (mode, x);
3254 return emit_move_insn (x, y);
3255 }
4ed008e7 3256#endif
b63679d2 3257
de17a47b 3258 /* Note that the real part always precedes the imag part in memory
3259 regardless of the machine's endianness. */
3260 switch (GET_CODE (XEXP (x, 0)))
3261 {
3262 case PRE_DEC:
3263 case POST_DEC:
3264 imag_first = true;
3265 break;
3266 case PRE_INC:
3267 case POST_INC:
3268 imag_first = false;
3269 break;
3270 default:
3271 gcc_unreachable ();
3272 }
2166bbaa 3273
de17a47b 3274 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3275 read_complex_part (y, imag_first));
3276 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3277 read_complex_part (y, !imag_first));
3278}
5b5abf88 3279
64003036 3280/* A subroutine of emit_move_complex. Perform the move from Y to X
3281 via two moves of the parts. Returns the last instruction emitted. */
3282
c81fd430 3283rtx_insn *
64003036 3284emit_move_complex_parts (rtx x, rtx y)
3285{
3286 /* Show the output dies here. This is necessary for SUBREGs
3287 of pseudos since we cannot track their lifetimes correctly;
3288 hard regs shouldn't appear here except as return values. */
3289 if (!reload_completed && !reload_in_progress
3290 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
18b42941 3291 emit_clobber (x);
64003036 3292
3293 write_complex_part (x, read_complex_part (y, false), false);
3294 write_complex_part (x, read_complex_part (y, true), true);
3295
3296 return get_last_insn ();
3297}
3298
de17a47b 3299/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3300 MODE is known to be complex. Returns the last instruction emitted. */
2166bbaa 3301
c81fd430 3302static rtx_insn *
3754d046 3303emit_move_complex (machine_mode mode, rtx x, rtx y)
de17a47b 3304{
3305 bool try_int;
5b5abf88 3306
de17a47b 3307 /* Need to take special care for pushes, to maintain proper ordering
3308 of the data, and possibly extra padding. */
3309 if (push_operand (x, mode))
3310 return emit_move_complex_push (mode, x, y);
b63679d2 3311
493bce58 3312 /* See if we can coerce the target into moving both values at once, except
3313 for floating point where we favor moving as parts if this is easy. */
5720e0a5 3314 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
493bce58 3315 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3316 && !(REG_P (x)
3317 && HARD_REGISTER_P (x)
0933f1d9 3318 && REG_NREGS (x) == 1)
493bce58 3319 && !(REG_P (y)
3320 && HARD_REGISTER_P (y)
0933f1d9 3321 && REG_NREGS (y) == 1))
5720e0a5 3322 try_int = false;
de17a47b 3323 /* Not possible if the values are inherently not adjacent. */
5720e0a5 3324 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
de17a47b 3325 try_int = false;
3326 /* Is possible if both are registers (or subregs of registers). */
3327 else if (register_operand (x, mode) && register_operand (y, mode))
3328 try_int = true;
3329 /* If one of the operands is a memory, and alignment constraints
3330 are friendly enough, we may be able to do combined memory operations.
3331 We do not attempt this if Y is a constant because that combination is
3332 usually better with the by-parts thing below. */
3333 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3334 && (!STRICT_ALIGNMENT
3335 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3336 try_int = true;
3337 else
3338 try_int = false;
3339
3340 if (try_int)
8d94ba7c 3341 {
c81fd430 3342 rtx_insn *ret;
5720e0a5 3343
3344 /* For memory to memory moves, optimal behavior can be had with the
3345 existing block move logic. */
3346 if (MEM_P (x) && MEM_P (y))
3347 {
3348 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3349 BLOCK_OP_NO_LIBCALL);
3350 return get_last_insn ();
3351 }
3352
80e467e2 3353 ret = emit_move_via_integer (mode, x, y, true);
de17a47b 3354 if (ret)
3355 return ret;
3356 }
8d94ba7c 3357
64003036 3358 return emit_move_complex_parts (x, y);
de17a47b 3359}
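
/* Worked example (added commentary; not part of the original expr.c):
   a CSImode copy between two pseudos sets TRY_INT, so on a target with
   a DImode move pattern it is emitted as one integer move; an SCmode
   copy between pseudos instead favors the by-parts path above whenever
   movsf is available; a memory-to-memory complex copy, alignment
   permitting, is handled by emit_block_move; and any CONCAT operand
   always forces emit_move_complex_parts.  */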
8d94ba7c 3360
de17a47b 3361/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3362 MODE is known to be MODE_CC. Returns the last instruction emitted. */
8d94ba7c 3363
c81fd430 3364static rtx_insn *
3754d046 3365emit_move_ccmode (machine_mode mode, rtx x, rtx y)
de17a47b 3366{
c81fd430 3367 rtx_insn *ret;
8d94ba7c 3368
de17a47b 3369 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3370 if (mode != CCmode)
3371 {
d6bf3b14 3372 enum insn_code code = optab_handler (mov_optab, CCmode);
de17a47b 3373 if (code != CODE_FOR_nothing)
f2ed60da 3374 {
3375 x = emit_move_change_mode (CCmode, mode, x, true);
3376 y = emit_move_change_mode (CCmode, mode, y, true);
3377 return emit_insn (GEN_FCN (code) (x, y));
3378 }
de17a47b 3379 }
3380
3381 /* Otherwise, find the MODE_INT mode of the same width. */
80e467e2 3382 ret = emit_move_via_integer (mode, x, y, false);
de17a47b 3383 gcc_assert (ret != NULL);
3384 return ret;
3385}
3386
8dfa1b7f 3387/* Return true if word I of OP lies entirely in the
3388 undefined bits of a paradoxical subreg. */
3389
3390static bool
1f1872fd 3391undefined_operand_subword_p (const_rtx op, int i)
8dfa1b7f 3392{
3754d046 3393 machine_mode innermode, innermostmode;
8dfa1b7f 3394 int offset;
3395 if (GET_CODE (op) != SUBREG)
3396 return false;
3397 innermode = GET_MODE (op);
3398 innermostmode = GET_MODE (SUBREG_REG (op));
3399 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3400 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3401 memory, except for a paradoxical subreg where we define
3402 SUBREG_BYTE to be 0; undo this exception as in
3403 simplify_subreg. */
3404 if (SUBREG_BYTE (op) == 0
3405 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3406 {
3407 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3408 if (WORDS_BIG_ENDIAN)
3409 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3410 if (BYTES_BIG_ENDIAN)
3411 offset += difference % UNITS_PER_WORD;
3412 }
3413 if (offset >= GET_MODE_SIZE (innermostmode)
3414 || offset <= -GET_MODE_SIZE (word_mode))
3415 return true;
3416 return false;
3417}
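
/* Example (added commentary; not part of the original expr.c): on a
   32-bit little-endian target, (subreg:DI (reg:SI R) 0) is paradoxical;
   word 0 overlaps R while word 1 lies entirely in the undefined high
   part, so this predicate returns true only for I == 1.
   emit_move_multi_word uses that to avoid emitting a move for the
   undefined word.  */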
3418
de17a47b 3419/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3420 MODE is any multi-word or full-word mode that lacks a move_insn
3421 pattern. Note that you will get better code if you define such
3422 patterns, even if they must turn into multiple assembler instructions. */
3423
c81fd430 3424static rtx_insn *
3754d046 3425emit_move_multi_word (machine_mode mode, rtx x, rtx y)
de17a47b 3426{
c81fd430 3427 rtx_insn *last_insn = 0;
3428 rtx_insn *seq;
3429 rtx inner;
de17a47b 3430 bool need_clobber;
3431 int i;
1f8b6002 3432
de17a47b 3433 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
1f8b6002 3434
de17a47b 3435 /* If X is a push on the stack, do the push now and replace
3436 X with a reference to the stack pointer. */
3437 if (push_operand (x, mode))
3438 x = emit_move_resolve_push (mode, x);
3439
3440 /* If we are in reload, see if either operand is a MEM whose address
3441 is scheduled for replacement. */
3442 if (reload_in_progress && MEM_P (x)
3443 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3444 x = replace_equiv_address_nv (x, inner);
3445 if (reload_in_progress && MEM_P (y)
3446 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3447 y = replace_equiv_address_nv (y, inner);
3448
3449 start_sequence ();
3450
3451 need_clobber = false;
3452 for (i = 0;
3453 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3454 i++)
3455 {
3456 rtx xpart = operand_subword (x, i, 1, mode);
8dfa1b7f 3457 rtx ypart;
3458
3459 /* Do not generate code for a move if it would come entirely
3460 from the undefined bits of a paradoxical subreg. */
3461 if (undefined_operand_subword_p (y, i))
3462 continue;
3463
3464 ypart = operand_subword (y, i, 1, mode);
de17a47b 3465
3466 /* If we can't get a part of Y, put Y into memory if it is a
89f18f73 3467 constant. Otherwise, force it into a register. Then we must
3468 be able to get a part of Y. */
de17a47b 3469 if (ypart == 0 && CONSTANT_P (y))
8d94ba7c 3470 {
f2d0e9f1 3471 y = use_anchored_address (force_const_mem (mode, y));
de17a47b 3472 ypart = operand_subword (y, i, 1, mode);
8d94ba7c 3473 }
de17a47b 3474 else if (ypart == 0)
3475 ypart = operand_subword_force (y, i, mode);
3476
3477 gcc_assert (xpart && ypart);
3478
3479 need_clobber |= (GET_CODE (xpart) == SUBREG);
35cb5232 3480
de17a47b 3481 last_insn = emit_move_insn (xpart, ypart);
8d94ba7c 3482 }
3483
de17a47b 3484 seq = get_insns ();
3485 end_sequence ();
3486
3487 /* Show the output dies here. This is necessary for SUBREGs
3488 of pseudos since we cannot track their lifetimes correctly;
3489 hard regs shouldn't appear here except as return values.
3490 We never want to emit such a clobber after reload. */
3491 if (x != y
3492 && ! (reload_in_progress || reload_completed)
3493 && need_clobber != 0)
18b42941 3494 emit_clobber (x);
de17a47b 3495
3496 emit_insn (seq);
3497
3498 return last_insn;
3499}
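
/* Example (added commentary; not part of the original expr.c): on a
   64-bit target without a movti pattern, a TImode copy between pseudos
   reaches this function and is emitted as two word_mode (DImode) moves
   built with operand_subword.  When a destination word turns out to be
   a SUBREG, the CLOBBER of the whole destination emitted ahead of the
   sequence tells dataflow the value is written in full rather than
   updated piecewise.  */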
3500
3501/* Low level part of emit_move_insn.
3502 Called just like emit_move_insn, but assumes X and Y
3503 are basically valid. */
3504
c81fd430 3505rtx_insn *
de17a47b 3506emit_move_insn_1 (rtx x, rtx y)
3507{
3754d046 3508 machine_mode mode = GET_MODE (x);
de17a47b 3509 enum insn_code code;
3510
3511 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3512
d6bf3b14 3513 code = optab_handler (mov_optab, mode);
de17a47b 3514 if (code != CODE_FOR_nothing)
3515 return emit_insn (GEN_FCN (code) (x, y));
3516
3517 /* Expand complex moves by moving real part and imag part. */
3518 if (COMPLEX_MODE_P (mode))
3519 return emit_move_complex (mode, x, y);
3520
68a556d6 3521 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3522 || ALL_FIXED_POINT_MODE_P (mode))
d2121072 3523 {
c81fd430 3524 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
d2121072 3525
3526 /* If we can't find an integer mode, use multi words. */
3527 if (result)
3528 return result;
3529 else
3530 return emit_move_multi_word (mode, x, y);
3531 }
3532
de17a47b 3533 if (GET_MODE_CLASS (mode) == MODE_CC)
3534 return emit_move_ccmode (mode, x, y);
3535
7be9cf34 3536 /* Try using a move pattern for the corresponding integer mode. This is
3537 only safe when simplify_subreg can convert MODE constants into integer
3538 constants. At present, it can only do this reliably if the value
3539 fits within a HOST_WIDE_INT. */
de17a47b 3540 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10f307d9 3541 {
c81fd430 3542 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
c6a6cdaa 3543
de17a47b 3544 if (ret)
c6a6cdaa 3545 {
3546 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3547 return ret;
3548 }
de17a47b 3549 }
ff385626 3550
de17a47b 3551 return emit_move_multi_word (mode, x, y);
3552}
07c143fb 3553
de17a47b 3554/* Generate code to copy Y into X.
3555 Both Y and X must have the same mode, except that
3556 Y can be a constant with VOIDmode.
3557 This mode cannot be BLKmode; use emit_block_move for that.
fa56dc1d 3558
de17a47b 3559 Return the last instruction emitted. */
6702c250 3560
c81fd430 3561rtx_insn *
de17a47b 3562emit_move_insn (rtx x, rtx y)
3563{
3754d046 3564 machine_mode mode = GET_MODE (x);
de17a47b 3565 rtx y_cst = NULL_RTX;
c81fd430 3566 rtx_insn *last_insn;
3567 rtx set;
9cb64ebc 3568
de17a47b 3569 gcc_assert (mode != BLKmode
3570 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
10f307d9 3571
de17a47b 3572 if (CONSTANT_P (y))
3573 {
3574 if (optimize
3575 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3576 && (last_insn = compress_float_constant (x, y)))
3577 return last_insn;
10f307d9 3578
de17a47b 3579 y_cst = y;
10f307d9 3580
ca316360 3581 if (!targetm.legitimate_constant_p (mode, y))
de17a47b 3582 {
3583 y = force_const_mem (mode, y);
7f964718 3584
de17a47b 3585 /* If the target's cannot_force_const_mem prevented the spill,
3586 assume that the target's move expanders will also take care
3587 of the non-legitimate constant. */
3588 if (!y)
3589 y = y_cst;
f2d0e9f1 3590 else
3591 y = use_anchored_address (y);
10f307d9 3592 }
de17a47b 3593 }
dd0d17cd 3594
de17a47b 3595 /* If X or Y are memory references, verify that their addresses are valid
3596 for the machine. */
3597 if (MEM_P (x)
bd1a81f7 3598 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3599 MEM_ADDR_SPACE (x))
4d25f9eb 3600 && ! push_operand (x, GET_MODE (x))))
de17a47b 3601 x = validize_mem (x);
7f964718 3602
de17a47b 3603 if (MEM_P (y)
bd1a81f7 3604 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3605 MEM_ADDR_SPACE (y)))
de17a47b 3606 y = validize_mem (y);
7f964718 3607
de17a47b 3608 gcc_assert (mode != BLKmode);
7f964718 3609
de17a47b 3610 last_insn = emit_move_insn_1 (x, y);
3611
3612 if (y_cst && REG_P (x)
3613 && (set = single_set (last_insn)) != NULL_RTX
3614 && SET_DEST (set) == x
3615 && ! rtx_equal_p (y_cst, SET_SRC (set)))
722c0f6e 3616 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
de17a47b 3617
3618 return last_insn;
10f307d9 3619}
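
/* Minimal usage sketch (added commentary; not part of the original
   expr.c):

     rtx dst = gen_reg_rtx (SImode);
     rtx_insn *insn = emit_move_insn (dst, GEN_INT (42));

   Both operands must share a mode, except that a constant source such
   as the CONST_INT above has VOIDmode and takes its meaning from the
   destination; BLKmode copies must go through emit_block_move
   instead.  */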
c0c4a46d 3620
34517c64 3621/* Generate the body of an instruction to copy Y into X.
3622 It may be a list of insns, if one insn isn't enough. */
3623
f9a00e9e 3624rtx_insn *
34517c64 3625gen_move_insn (rtx x, rtx y)
3626{
3627 rtx_insn *seq;
3628
3629 start_sequence ();
3630 emit_move_insn_1 (x, y);
3631 seq = get_insns ();
3632 end_sequence ();
3633 return seq;
3634}
3635
f9a00e9e 3636/* Same as above, but return rtx (used as a callback, which must have
3637 a prototype compatible with other functions returning rtx). */
3638
3639rtx
3640gen_move_insn_uncast (rtx x, rtx y)
3641{
3642 return gen_move_insn (x, y);
3643}
3644
c0c4a46d 3645/* If Y is representable exactly in a narrower mode, and the target can
3646 perform the extension directly from constant or memory, then emit the
3647 move as an extension. */
3648
c81fd430 3649static rtx_insn *
35cb5232 3650compress_float_constant (rtx x, rtx y)
c0c4a46d 3651{
3754d046 3652 machine_mode dstmode = GET_MODE (x);
3653 machine_mode orig_srcmode = GET_MODE (y);
3654 machine_mode srcmode;
c0c4a46d 3655 REAL_VALUE_TYPE r;
8b1bf1e9 3656 int oldcost, newcost;
f529eb25 3657 bool speed = optimize_insn_for_speed_p ();
c0c4a46d 3658
3659 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3660
ca316360 3661 if (targetm.legitimate_constant_p (dstmode, y))
7013e87c 3662 oldcost = set_src_cost (y, speed);
8b1bf1e9 3663 else
7013e87c 3664 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
8b1bf1e9 3665
c0c4a46d 3666 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3667 srcmode != orig_srcmode;
3668 srcmode = GET_MODE_WIDER_MODE (srcmode))
3669 {
3670 enum insn_code ic;
c81fd430 3671 rtx trunc_y;
3672 rtx_insn *last_insn;
c0c4a46d 3673
3674 /* Skip if the target can't extend this way. */
3675 ic = can_extend_p (dstmode, srcmode, 0);
3676 if (ic == CODE_FOR_nothing)
3677 continue;
3678
3679 /* Skip if the narrowed value isn't exact. */
3680 if (! exact_real_truncate (srcmode, &r))
3681 continue;
3682
3683 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3684
ca316360 3685 if (targetm.legitimate_constant_p (srcmode, trunc_y))
c0c4a46d 3686 {
3687 /* Skip if the target needs extra instructions to perform
3688 the extension. */
39c56a89 3689 if (!insn_operand_matches (ic, 1, trunc_y))
c0c4a46d 3690 continue;
8b1bf1e9 3691 /* This is valid, but may not be cheaper than the original. */
7013e87c 3692 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3693 speed);
8b1bf1e9 3694 if (oldcost < newcost)
3695 continue;
c0c4a46d 3696 }
3697 else if (float_extend_from_mem[dstmode][srcmode])
8b1bf1e9 3698 {
3699 trunc_y = force_const_mem (srcmode, trunc_y);
3700 /* This is valid, but may not be cheaper than the original. */
7013e87c 3701 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3702 speed);
8b1bf1e9 3703 if (oldcost < newcost)
3704 continue;
3705 trunc_y = validize_mem (trunc_y);
3706 }
c0c4a46d 3707 else
3708 continue;
3220d3c5 3709
3710 /* For CSE's benefit, force the compressed constant pool entry
3711 into a new pseudo. This constant may be used in different modes,
3712 and if not, combine will put things back together for us. */
3713 trunc_y = force_reg (srcmode, trunc_y);
0614d12c 3714
3715 /* If x is a hard register, perform the extension into a pseudo,
3716 so that e.g. stack realignment code is aware of it. */
3717 rtx target = x;
3718 if (REG_P (x) && HARD_REGISTER_P (x))
3719 target = gen_reg_rtx (dstmode);
3720
3721 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
c0c4a46d 3722 last_insn = get_last_insn ();
3723
0614d12c 3724 if (REG_P (target))
6442675c 3725 set_unique_reg_note (last_insn, REG_EQUAL, y);
c0c4a46d 3726
0614d12c 3727 if (target != x)
3728 return emit_move_insn (x, target);
c0c4a46d 3729 return last_insn;
3730 }
3731
c81fd430 3732 return NULL;
c0c4a46d 3733}
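
/* Worked example (added commentary; not part of the original expr.c):
   if Y is the DFmode constant 1.5, exact_real_truncate succeeds for
   SFmode, so on a target with a cheap extendsfdf2 the load can be
   emitted as

     (float_extend:DF (const_double:SF 1.5))

   or as an extension from an SFmode constant-pool entry, provided
   set_src_cost does not report it as more expensive than the original
   DFmode load.  */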
10f307d9 3734\f
3735/* Pushing data onto the stack. */
3736
3737/* Push a block of length SIZE (perhaps variable)
3738 and return an rtx to address the beginning of the block.
10f307d9 3739 The value may be virtual_outgoing_args_rtx.
3740
3741 EXTRA is the number of bytes of padding to push in addition to SIZE.
3742 BELOW nonzero means this padding comes at low addresses;
3743 otherwise, the padding comes at high addresses. */
3744
3745rtx
35cb5232 3746push_block (rtx size, int extra, int below)
10f307d9 3747{
19cb6b50 3748 rtx temp;
ed8d3eee 3749
3750 size = convert_modes (Pmode, ptr_mode, size, 1);
10f307d9 3751 if (CONSTANT_P (size))
29c05e22 3752 anti_adjust_stack (plus_constant (Pmode, size, extra));
8ad4c111 3753 else if (REG_P (size) && extra == 0)
10f307d9 3754 anti_adjust_stack (size);
3755 else
3756 {
481feae3 3757 temp = copy_to_mode_reg (Pmode, size);
10f307d9 3758 if (extra != 0)
0359f9f5 3759 temp = expand_binop (Pmode, add_optab, temp,
3760 gen_int_mode (extra, Pmode),
10f307d9 3761 temp, 0, OPTAB_LIB_WIDEN);
3762 anti_adjust_stack (temp);
3763 }
3764
2b785411 3765 if (STACK_GROWS_DOWNWARD)
4448f543 3766 {
4448f543 3767 temp = virtual_outgoing_args_rtx;
3768 if (extra != 0 && below)
29c05e22 3769 temp = plus_constant (Pmode, temp, extra);
4448f543 3770 }
3771 else
3772 {
971ba038 3773 if (CONST_INT_P (size))
29c05e22 3774 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
fa56dc1d 3775 -INTVAL (size) - (below ? 0 : extra));
4448f543 3776 else if (extra != 0 && !below)
3777 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
29c05e22 3778 negate_rtx (Pmode, plus_constant (Pmode, size,
3779 extra)));
4448f543 3780 else
3781 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3782 negate_rtx (Pmode, size));
3783 }
10f307d9 3784
3785 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3786}
3787
dfe00a8f 3788/* A utility routine that returns the base of an auto-inc memory, or NULL. */
3789
3790static rtx
3791mem_autoinc_base (rtx mem)
3792{
3793 if (MEM_P (mem))
3794 {
3795 rtx addr = XEXP (mem, 0);
3796 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3797 return XEXP (addr, 0);
3798 }
3799 return NULL;
3800}
3801
3802/* A utility routine used here, in reload, and in try_split. The insns
3803 after PREV up to and including LAST are known to adjust the stack,
3804 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3805 placing notes as appropriate. PREV may be NULL, indicating the
3806 entire insn sequence prior to LAST should be scanned.
3807
3808 The set of allowed stack pointer modifications is small:
3809 (1) One or more auto-inc style memory references (aka pushes),
3810 (2) One or more addition/subtraction with the SP as destination,
3811 (3) A single move insn with the SP as destination,
45152a7b 3812 (4) A call_pop insn,
3813 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
dfe00a8f 3814
45152a7b 3815 Insns in the sequence that do not modify the SP are ignored,
3816 except for noreturn calls.
dfe00a8f 3817
3818 The return value is the amount of adjustment that can be trivially
3819 verified, via immediate operand or auto-inc. If the adjustment
3820 cannot be trivially extracted, the return value is INT_MIN. */
3821
40125f1c 3822HOST_WIDE_INT
50fc2d35 3823find_args_size_adjust (rtx_insn *insn)
dfe00a8f 3824{
40125f1c 3825 rtx dest, set, pat;
3826 int i;
dfe00a8f 3827
40125f1c 3828 pat = PATTERN (insn);
3829 set = NULL;
dfe00a8f 3830
40125f1c 3831 /* Look for a call_pop pattern. */
3832 if (CALL_P (insn))
3833 {
3834 /* We have to allow non-call_pop patterns for the case
3835 of emit_single_push_insn of a TLS address. */
3836 if (GET_CODE (pat) != PARALLEL)
3837 return 0;
dfe00a8f 3838
40125f1c 3839 /* All call_pop have a stack pointer adjust in the parallel.
3840 The call itself is always first, and the stack adjust is
3841 usually last, so search from the end. */
3842 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
dfe00a8f 3843 {
40125f1c 3844 set = XVECEXP (pat, 0, i);
3845 if (GET_CODE (set) != SET)
a36f1a95 3846 continue;
40125f1c 3847 dest = SET_DEST (set);
3848 if (dest == stack_pointer_rtx)
3849 break;
dfe00a8f 3850 }
40125f1c 3851 /* We'd better have found the stack pointer adjust. */
3852 if (i == 0)
3853 return 0;
3854 /* Fall through to process the extracted SET and DEST
3855 as if it was a standalone insn. */
3856 }
3857 else if (GET_CODE (pat) == SET)
3858 set = pat;
3859 else if ((set = single_set (insn)) != NULL)
3860 ;
3861 else if (GET_CODE (pat) == PARALLEL)
3862 {
3863 /* ??? Some older ports use a parallel with a stack adjust
3864 and a store for a PUSH_ROUNDING pattern, rather than a
3865 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3866 /* ??? See h8300 and m68k, pushqi1. */
3867 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
dfe00a8f 3868 {
40125f1c 3869 set = XVECEXP (pat, 0, i);
3870 if (GET_CODE (set) != SET)
dfe00a8f 3871 continue;
40125f1c 3872 dest = SET_DEST (set);
3873 if (dest == stack_pointer_rtx)
3874 break;
3875
3876 /* We do not expect an auto-inc of the sp in the parallel. */
3877 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3878 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3879 != stack_pointer_rtx);
dfe00a8f 3880 }
40125f1c 3881 if (i < 0)
3882 return 0;
3883 }
3884 else
3885 return 0;
3886
3887 dest = SET_DEST (set);
3888
3889 /* Look for direct modifications of the stack pointer. */
3890 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3891 {
3892 /* Look for a trivial adjustment, otherwise assume nothing. */
3893 /* Note that the SPU restore_stack_block pattern refers to
3894 the stack pointer in V4SImode. Consider that non-trivial. */
3895 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3896 && GET_CODE (SET_SRC (set)) == PLUS
3897 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3898 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3899 return INTVAL (XEXP (SET_SRC (set), 1));
3900 /* ??? Reload can generate no-op moves, which will be cleaned
3901 up later. Recognize it and continue searching. */
3902 else if (rtx_equal_p (dest, SET_SRC (set)))
3903 return 0;
dfe00a8f 3904 else
40125f1c 3905 return HOST_WIDE_INT_MIN;
3906 }
3907 else
3908 {
3909 rtx mem, addr;
dfe00a8f 3910
dfe00a8f 3911 /* Otherwise only think about autoinc patterns. */
40125f1c 3912 if (mem_autoinc_base (dest) == stack_pointer_rtx)
dfe00a8f 3913 {
40125f1c 3914 mem = dest;
3915 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3916 != stack_pointer_rtx);
dfe00a8f 3917 }
40125f1c 3918 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3919 mem = SET_SRC (set);
dfe00a8f 3920 else
40125f1c 3921 return 0;
3922
3923 addr = XEXP (mem, 0);
3924 switch (GET_CODE (addr))
3925 {
3926 case PRE_INC:
3927 case POST_INC:
3928 return GET_MODE_SIZE (GET_MODE (mem));
3929 case PRE_DEC:
3930 case POST_DEC:
3931 return -GET_MODE_SIZE (GET_MODE (mem));
3932 case PRE_MODIFY:
3933 case POST_MODIFY:
3934 addr = XEXP (addr, 1);
3935 gcc_assert (GET_CODE (addr) == PLUS);
3936 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3937 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3938 return INTVAL (XEXP (addr, 1));
3939 default:
3940 gcc_unreachable ();
3941 }
3942 }
3943}
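
/* Example (added commentary; not part of the original expr.c): for a
   4-byte push such as

     (set (mem:SI (pre_dec:SI (reg sp))) (reg r0))

   the auto-inc case above yields -4, while an explicit adjustment like
   (set (reg sp) (plus (reg sp) (const_int -16))) yields -16.  A
   stack-pointer store whose source is not a PLUS of a constant yields
   HOST_WIDE_INT_MIN, i.e. "cannot be trivially extracted".  */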
3944
3945int
32f1a0c8 3946fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
40125f1c 3947{
3948 int args_size = end_args_size;
3949 bool saw_unknown = false;
4cd001d5 3950 rtx_insn *insn;
40125f1c 3951
3952 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3953 {
3954 HOST_WIDE_INT this_delta;
3955
3956 if (!NONDEBUG_INSN_P (insn))
dfe00a8f 3957 continue;
3958
40125f1c 3959 this_delta = find_args_size_adjust (insn);
3960 if (this_delta == 0)
45152a7b 3961 {
3962 if (!CALL_P (insn)
3963 || ACCUMULATE_OUTGOING_ARGS
3964 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3965 continue;
3966 }
40125f1c 3967
3968 gcc_assert (!saw_unknown);
3969 if (this_delta == HOST_WIDE_INT_MIN)
3970 saw_unknown = true;
3971
dfe00a8f 3972 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3764c94e 3973 if (STACK_GROWS_DOWNWARD)
3974 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3975
dfe00a8f 3976 args_size -= this_delta;
3977 }
3978
3979 return saw_unknown ? INT_MIN : args_size;
3980}
fad4a30c 3981
dfe00a8f 3982#ifdef PUSH_ROUNDING
ef7dc4b4 3983/* Emit single push insn. */
fad4a30c 3984
ef7dc4b4 3985static void
3754d046 3986emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
ef7dc4b4 3987{
ef7dc4b4 3988 rtx dest_addr;
07c143fb 3989 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
ef7dc4b4 3990 rtx dest;
675b92cc 3991 enum insn_code icode;
ef7dc4b4 3992
675b92cc 3993 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3994 /* If there is push pattern, use it. Otherwise try old way of throwing
3995 MEM representing push operation to move expander. */
d6bf3b14 3996 icode = optab_handler (push_optab, mode);
675b92cc 3997 if (icode != CODE_FOR_nothing)
3998 {
8786db1e 3999 struct expand_operand ops[1];
4000
4001 create_input_operand (&ops[0], x, mode);
4002 if (maybe_expand_insn (icode, 1, ops))
4003 return;
675b92cc 4004 }
ef7dc4b4 4005 if (GET_MODE_SIZE (mode) == rounded_size)
4006 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
20e1fca5 4007 /* If we are to pad downward, adjust the stack pointer first and
4008 then store X into the stack location using an offset. This is
4009 because emit_move_insn does not know how to pad; it does not have
4010 access to type. */
4011 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4012 {
4013 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4014 HOST_WIDE_INT offset;
4015
4016 emit_move_insn (stack_pointer_rtx,
4017 expand_binop (Pmode,
3764c94e 4018 STACK_GROWS_DOWNWARD ? sub_optab
4019 : add_optab,
20e1fca5 4020 stack_pointer_rtx,
0359f9f5 4021 gen_int_mode (rounded_size, Pmode),
20e1fca5 4022 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4023
4024 offset = (HOST_WIDE_INT) padding_size;
3764c94e 4025 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
20e1fca5 4026 /* We have already decremented the stack pointer, so get the
4027 previous value. */
4028 offset += (HOST_WIDE_INT) rounded_size;
3764c94e 4029
4030 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
20e1fca5 4031 /* We have already incremented the stack pointer, so get the
4032 previous value. */
4033 offset -= (HOST_WIDE_INT) rounded_size;
3764c94e 4034
c338f2e3 4035 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4036 gen_int_mode (offset, Pmode));
20e1fca5 4037 }
ef7dc4b4 4038 else
4039 {
3764c94e 4040 if (STACK_GROWS_DOWNWARD)
4041 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4042 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4043 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4044 Pmode));
4045 else
4046 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4047 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4048 gen_int_mode (rounded_size, Pmode));
4049
ef7dc4b4 4050 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4051 }
4052
4053 dest = gen_rtx_MEM (mode, dest_addr);
4054
ef7dc4b4 4055 if (type != 0)
4056 {
4057 set_mem_attributes (dest, type, 1);
a9d9ab08 4058
dc7cdd37 4059 if (cfun->tail_call_marked)
a9d9ab08 4060 /* Function incoming arguments may overlap with sibling call
4061 outgoing arguments and we cannot allow reordering of reads
4062 from function arguments with stores to outgoing arguments
4063 of sibling calls. */
4064 set_mem_alias_set (dest, 0);
ef7dc4b4 4065 }
4066 emit_move_insn (dest, x);
ef7dc4b4 4067}
dfe00a8f 4068
4069/* Emit and annotate a single push insn. */
4070
4071static void
3754d046 4072emit_single_push_insn (machine_mode mode, rtx x, tree type)
dfe00a8f 4073{
4074 int delta, old_delta = stack_pointer_delta;
1d277a67 4075 rtx_insn *prev = get_last_insn ();
4076 rtx_insn *last;
dfe00a8f 4077
4078 emit_single_push_insn_1 (mode, x, type);
4079
4080 last = get_last_insn ();
4081
4082 /* Notice the common case where we emitted exactly one insn. */
4083 if (PREV_INSN (last) == prev)
4084 {
4085 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4086 return;
4087 }
4088
4089 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4090 gcc_assert (delta == INT_MIN || delta == old_delta);
4091}
fad4a30c 4092#endif
ef7dc4b4 4093
a95e5776 4094/* If reading SIZE bytes from X will end up reading from
4095 Y, return the number of bytes that overlap. Return -1
4096 if there is no overlap, or -2 if we cannot determine it
4097 (for example when X and Y have different base registers). */
4098
4099static int
4100memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4101{
4102 rtx tmp = plus_constant (Pmode, x, size);
4103 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4104
4105 if (!CONST_INT_P (sub))
4106 return -2;
4107
4108 HOST_WIDE_INT val = INTVAL (sub);
4109
4110 return IN_RANGE (val, 1, size) ? val : -1;
4111}
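
/* Example (added commentary; not part of the original expr.c): with
   X == (reg sp), Y == (plus (reg sp) (const_int 8)) and SIZE == 16,
   the simplified difference (X + SIZE) - Y is (const_int 8), which is
   in [1, 16], so 8 bytes of overlap are reported.  If Y used an
   unrelated base register the difference would not simplify to a
   CONST_INT and -2 would be returned.  */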
4112
10f307d9 4113/* Generate code to push X onto the stack, assuming it has mode MODE and
4114 type TYPE.
4115 MODE is redundant except when X is a CONST_INT (since they don't
4116 carry mode info).
4117 SIZE is an rtx for the size of data to be copied (in bytes),
4118 needed only if X is BLKmode.
a95e5776 4119 Return true if successful. May return false if asked to push a
4120 partial argument during a sibcall optimization (as specified by
4121 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4122 to not overlap.
10f307d9 4123
decd7a45 4124 ALIGN (in bits) is the maximum alignment we can assume.
10f307d9 4125
a984cc1e 4126 If PARTIAL and REG are both nonzero, then copy that many of the first
f054eb3c 4127 bytes of X into registers starting with REG, and push the rest of X.
4128 The amount of space pushed is decreased by PARTIAL bytes.
10f307d9 4129 REG must be a hard register in this case.
a984cc1e 4130 If REG is zero but PARTIAL is not, take all other actions for an
4131 argument partially in registers, but do not actually load any
4132 registers.
10f307d9 4133
4134 EXTRA is the amount in bytes of extra space to leave next to this arg.
4bbea254 4135 This is ignored if an argument block has already been allocated.
10f307d9 4136
4137 On a machine that lacks real push insns, ARGS_ADDR is the address of
4138 the bottom of the argument block for this call. We use indexing off there
4139 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4140 argument block has not been preallocated.
4141
997d68fe 4142 ARGS_SO_FAR is the size of args previously pushed for this call.
4143
4144 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4145 for arguments passed in registers. If nonzero, it will be the number
4146 of bytes required. */
10f307d9 4147
a95e5776 4148bool
3754d046 4149emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
35cb5232 4150 unsigned int align, int partial, rtx reg, int extra,
4151 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
a95e5776 4152 rtx alignment_pad, bool sibcall_p)
10f307d9 4153{
4154 rtx xinner;
3764c94e 4155 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
10f307d9 4156
4157 /* Decide where to pad the argument: `downward' for below,
4158 `upward' for above, or `none' for don't pad it.
4159 Default is below for small data on big-endian machines; else above. */
4160 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4161
ff385626 4162 /* Invert direction if stack is post-decrement.
12a97a04 4163 FIXME: why? */
4164 if (STACK_PUSH_CODE == POST_DEC)
10f307d9 4165 if (where_pad != none)
4166 where_pad = (where_pad == downward ? upward : downward);
4167
0a534ba7 4168 xinner = x;
10f307d9 4169
a95e5776 4170 int nregs = partial / UNITS_PER_WORD;
4171 rtx *tmp_regs = NULL;
4172 int overlapping = 0;
4173
851fc2b3 4174 if (mode == BLKmode
4175 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
10f307d9 4176 {
4177 /* Copy a block into the stack, entirely or partially. */
4178
19cb6b50 4179 rtx temp;
f054eb3c 4180 int used;
a2509aaa 4181 int offset;
10f307d9 4182 int skip;
fa56dc1d 4183
f054eb3c 4184 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4185 used = partial - offset;
a2509aaa 4186
851fc2b3 4187 if (mode != BLKmode)
4188 {
4189 /* A value is to be stored in an insufficiently aligned
4190 stack slot; copy via a suitably aligned slot if
4191 necessary. */
4192 size = GEN_INT (GET_MODE_SIZE (mode));
4193 if (!MEM_P (xinner))
4194 {
0ab48139 4195 temp = assign_temp (type, 1, 1);
851fc2b3 4196 emit_move_insn (temp, xinner);
4197 xinner = temp;
4198 }
4199 }
4200
611234b4 4201 gcc_assert (size);
10f307d9 4202
10f307d9 4203 /* USED is now the # of bytes we need not copy to the stack
4204 because registers will take care of them. */
4205
4206 if (partial != 0)
e513d163 4207 xinner = adjust_address (xinner, BLKmode, used);
10f307d9 4208
4209 /* If the partial register-part of the arg counts in its stack size,
4210 skip the part of stack space corresponding to the registers.
4211 Otherwise, start copying to the beginning of the stack space,
4212 by setting SKIP to 0. */
997d68fe 4213 skip = (reg_parm_stack_space == 0) ? 0 : used;
10f307d9 4214
4215#ifdef PUSH_ROUNDING
4216 /* Do it with several push insns if that doesn't take lots of insns
4217 and if there is no difficulty with push insns that skip bytes
4218 on the stack for alignment purposes. */
4219 if (args_addr == 0
4448f543 4220 && PUSH_ARGS
971ba038 4221 && CONST_INT_P (size)
10f307d9 4222 && skip == 0
b4ad0ea6 4223 && MEM_ALIGN (xinner) >= align
d4bd0e64 4224 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
10f307d9 4225 /* Here we avoid the case of a structure whose weak alignment
4226 forces many pushes of a small amount of data,
4227 and such small pushes do rounding that causes trouble. */
9439ebf7 4228 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
325d1c45 4229 || align >= BIGGEST_ALIGNMENT
decd7a45 4230 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4231 == (align / BITS_PER_UNIT)))
db5b2472 4232 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
10f307d9 4233 {
4234 /* Push padding now if padding above and stack grows down,
4235 or if padding below and stack grows up.
4236 But if space already allocated, this has already been done. */
4237 if (extra && args_addr == 0
4238 && where_pad != none && where_pad != stack_direction)
b572011e 4239 anti_adjust_stack (GEN_INT (extra));
10f307d9 4240
9fe0e1b8 4241 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
10f307d9 4242 }
4243 else
fa56dc1d 4244#endif /* PUSH_ROUNDING */
10f307d9 4245 {
a9f2963b 4246 rtx target;
4247
10f307d9 4248 /* Otherwise make space on the stack and copy the data
4249 to the address of that space. */
4250
4251 /* Deduct words put into registers from the size we must copy. */
4252 if (partial != 0)
4253 {
971ba038 4254 if (CONST_INT_P (size))
b572011e 4255 size = GEN_INT (INTVAL (size) - used);
10f307d9 4256 else
4257 size = expand_binop (GET_MODE (size), sub_optab, size,
0359f9f5 4258 gen_int_mode (used, GET_MODE (size)),
4259 NULL_RTX, 0, OPTAB_LIB_WIDEN);
10f307d9 4260 }
4261
4262 /* Get the address of the stack space.
4263 In this case, we do not deal with EXTRA separately.
4264 A single stack adjust will do. */
4265 if (! args_addr)
4266 {
4267 temp = push_block (size, extra, where_pad == downward);
4268 extra = 0;
4269 }
971ba038 4270 else if (CONST_INT_P (args_so_far))
10f307d9 4271 temp = memory_address (BLKmode,
29c05e22 4272 plus_constant (Pmode, args_addr,
10f307d9 4273 skip + INTVAL (args_so_far)));
4274 else
4275 temp = memory_address (BLKmode,
29c05e22 4276 plus_constant (Pmode,
4277 gen_rtx_PLUS (Pmode,
941522d6 4278 args_addr,
4279 args_so_far),
10f307d9 4280 skip));
c0bfc78e 4281
4282 if (!ACCUMULATE_OUTGOING_ARGS)
4283 {
4284 /* If the source is referenced relative to the stack pointer,
4285 copy it to another register to stabilize it. We do not need
4286 to do this if we know that we won't be changing sp. */
4287
4288 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4289 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4290 temp = copy_to_reg (temp);
4291 }
4292
fa56dc1d 4293 target = gen_rtx_MEM (BLKmode, temp);
a9f2963b 4294
f7db692b 4295 /* We do *not* set_mem_attributes here, because incoming arguments
4296 may overlap with sibling call outgoing arguments and we cannot
4297 allow reordering of reads from function arguments with stores
4298 to outgoing arguments of sibling calls. We do, however, want
4299 to record the alignment of the stack slot. */
0378dbdc 4300 /* ALIGN may well be better aligned than TYPE, e.g. due to
4301 PARM_BOUNDARY. Assume the caller isn't lying. */
4302 set_mem_align (target, align);
c0bfc78e 4303
a95e5776 4304 /* If part should go in registers and pushing to that part would
4305 overwrite some of the values that need to go into regs, load the
4306 overlapping values into temporary pseudos to be moved into the hard
4307 regs at the end after the stack pushing has completed.
4308 We cannot load them directly into the hard regs here because
4309 they can be clobbered by the block move expansions.
4310 See PR 65358. */
4311
4312 if (partial > 0 && reg != 0 && mode == BLKmode
4313 && GET_CODE (reg) != PARALLEL)
4314 {
4315 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4316 if (overlapping > 0)
4317 {
4318 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4319 overlapping /= UNITS_PER_WORD;
4320
4321 tmp_regs = XALLOCAVEC (rtx, overlapping);
4322
4323 for (int i = 0; i < overlapping; i++)
4324 tmp_regs[i] = gen_reg_rtx (word_mode);
4325
4326 for (int i = 0; i < overlapping; i++)
4327 emit_move_insn (tmp_regs[i],
4328 operand_subword_force (target, i, mode));
4329 }
4330 else if (overlapping == -1)
4331 overlapping = 0;
4332 /* Could not determine whether there is overlap.
4333 Fail the sibcall. */
4334 else
4335 {
4336 overlapping = 0;
4337 if (sibcall_p)
4338 return false;
4339 }
4340 }
0378dbdc 4341 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
10f307d9 4342 }
4343 }
4344 else if (partial > 0)
4345 {
4346 /* Scalar partly in registers. */
4347
4348 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4349 int i;
4350 int not_stack;
f054eb3c 4351 /* # bytes of start of argument
10f307d9 4352 that we must make space for but need not store. */
f0cf03cb 4353 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
10f307d9 4354 int args_offset = INTVAL (args_so_far);
4355 int skip;
4356
4357 /* Push padding now if padding above and stack grows down,
4358 or if padding below and stack grows up.
4359 But if space already allocated, this has already been done. */
4360 if (extra && args_addr == 0
4361 && where_pad != none && where_pad != stack_direction)
b572011e 4362 anti_adjust_stack (GEN_INT (extra));
10f307d9 4363
4364 /* If we make space by pushing it, we might as well push
4365 the real data. Otherwise, we can leave OFFSET nonzero
4366 and leave the space uninitialized. */
4367 if (args_addr == 0)
4368 offset = 0;
4369
4370 /* Now NOT_STACK gets the number of words that we don't need to
dc537795 4371 allocate on the stack. Convert OFFSET to words too. */
f054eb3c 4372 not_stack = (partial - offset) / UNITS_PER_WORD;
f0cf03cb 4373 offset /= UNITS_PER_WORD;
10f307d9 4374
4375 /* If the partial register-part of the arg counts in its stack size,
4376 skip the part of stack space corresponding to the registers.
4377 Otherwise, start copying to the beginning of the stack space,
4378 by setting SKIP to 0. */
997d68fe 4379 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
10f307d9 4380
ca316360 4381 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
10f307d9 4382 x = validize_mem (force_const_mem (mode, x));
4383
4384 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4385 SUBREGs of such registers are not allowed. */
8ad4c111 4386 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
10f307d9 4387 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4388 x = copy_to_reg (x);
4389
4390 /* Loop over all the words allocated on the stack for this arg. */
4391 /* We can do it by words, because any scalar bigger than a word
4392 has a size a multiple of a word. */
10f307d9 4393 for (i = size - 1; i >= not_stack; i--)
10f307d9 4394 if (i >= not_stack + offset)
a95e5776 4395 if (!emit_push_insn (operand_subword_force (x, i, mode),
b572011e 4396 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4397 0, args_addr,
4398 GEN_INT (args_offset + ((i - not_stack + skip)
997d68fe 4399 * UNITS_PER_WORD)),
a95e5776 4400 reg_parm_stack_space, alignment_pad, sibcall_p))
4401 return false;
10f307d9 4402 }
4403 else
4404 {
4405 rtx addr;
f7c44134 4406 rtx dest;
10f307d9 4407
4408 /* Push padding now if padding above and stack grows down,
4409 or if padding below and stack grows up.
4410 But if space already allocated, this has already been done. */
4411 if (extra && args_addr == 0
4412 && where_pad != none && where_pad != stack_direction)
b572011e 4413 anti_adjust_stack (GEN_INT (extra));
10f307d9 4414
4415#ifdef PUSH_ROUNDING
4448f543 4416 if (args_addr == 0 && PUSH_ARGS)
ef7dc4b4 4417 emit_single_push_insn (mode, x, type);
10f307d9 4418 else
4419#endif
eb4b06b6 4420 {
971ba038 4421 if (CONST_INT_P (args_so_far))
eb4b06b6 4422 addr
4423 = memory_address (mode,
29c05e22 4424 plus_constant (Pmode, args_addr,
eb4b06b6 4425 INTVAL (args_so_far)));
fa56dc1d 4426 else
941522d6 4427 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4428 args_so_far));
ef7dc4b4 4429 dest = gen_rtx_MEM (mode, addr);
f7db692b 4430
4431 /* We do *not* set_mem_attributes here, because incoming arguments
4432 may overlap with sibling call outgoing arguments and we cannot
4433 allow reordering of reads from function arguments with stores
4434 to outgoing arguments of sibling calls. We do, however, want
4435 to record the alignment of the stack slot. */
4436 /* ALIGN may well be better aligned than TYPE, e.g. due to
4437 PARM_BOUNDARY. Assume the caller isn't lying. */
4438 set_mem_align (dest, align);
10f307d9 4439
ef7dc4b4 4440 emit_move_insn (dest, x);
ef7dc4b4 4441 }
10f307d9 4442 }
4443
a95e5776 4444 /* Move the partial arguments into the registers and any overlapping
4445 values that we moved into the pseudos in tmp_regs. */
a984cc1e 4446 if (partial > 0 && reg != 0)
ce739127 4447 {
4448 /* Handle calls that pass values in multiple non-contiguous locations.
4449 The Irix 6 ABI has examples of this. */
4450 if (GET_CODE (reg) == PARALLEL)
5f4cd670 4451 emit_group_load (reg, x, type, -1);
ce739127 4452 else
a95e5776 4453 {
f054eb3c 4454 gcc_assert (partial % UNITS_PER_WORD == 0);
a95e5776 4455 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4456
4457 for (int i = 0; i < overlapping; i++)
4458 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4459 + nregs - overlapping + i),
4460 tmp_regs[i]);
4461
f054eb3c 4462 }
ce739127 4463 }
10f307d9 4464
4465 if (extra && args_addr == 0 && where_pad == stack_direction)
b572011e 4466 anti_adjust_stack (GEN_INT (extra));
fa56dc1d 4467
364a85bd 4468 if (alignment_pad && args_addr == 0)
9d855d2f 4469 anti_adjust_stack (alignment_pad);
a95e5776 4470
4471 return true;
10f307d9 4472}
4473\f
d8e5b213 4474/* Return X if X can be used as a subtarget in a sequence of arithmetic
4475 operations. */
4476
4477static rtx
35cb5232 4478get_subtarget (rtx x)
d8e5b213 4479{
a1ad7483 4480 return (optimize
4481 || x == 0
d8e5b213 4482 /* Only registers can be subtargets. */
8ad4c111 4483 || !REG_P (x)
d8e5b213 4484 /* Don't use hard regs to avoid extending their life. */
4485 || REGNO (x) < FIRST_PSEUDO_REGISTER
d8e5b213 4486 ? 0 : x);
4487}
4488
79367e65 4489/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4490 FIELD is a bitfield. Returns true if the optimization was successful,
4491 and there's nothing else to do. */
4492
4493static bool
4494optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4495 unsigned HOST_WIDE_INT bitpos,
4bb60ec7 4496 unsigned HOST_WIDE_INT bitregion_start,
4497 unsigned HOST_WIDE_INT bitregion_end,
3754d046 4498 machine_mode mode1, rtx str_rtx,
79367e65 4499 tree to, tree src)
4500{
3754d046 4501 machine_mode str_mode = GET_MODE (str_rtx);
79367e65 4502 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4503 tree op0, op1;
4504 rtx value, result;
4505 optab binop;
c4532f22 4506 gimple srcstmt;
4507 enum tree_code code;
79367e65 4508
4509 if (mode1 != VOIDmode
4510 || bitsize >= BITS_PER_WORD
4511 || str_bitsize > BITS_PER_WORD
4512 || TREE_SIDE_EFFECTS (to)
4513 || TREE_THIS_VOLATILE (to))
4514 return false;
4515
4516 STRIP_NOPS (src);
c4532f22 4517 if (TREE_CODE (src) != SSA_NAME)
4518 return false;
4519 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4520 return false;
4521
4522 srcstmt = get_gimple_for_ssa_name (src);
4523 if (!srcstmt
4524 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
79367e65 4525 return false;
4526
c4532f22 4527 code = gimple_assign_rhs_code (srcstmt);
4528
4529 op0 = gimple_assign_rhs1 (srcstmt);
4530
4531 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4532 to find its initialization. Hopefully the initialization will
4533 be from a bitfield load. */
4534 if (TREE_CODE (op0) == SSA_NAME)
4535 {
4536 gimple op0stmt = get_gimple_for_ssa_name (op0);
4537
4538 /* We want to eventually have OP0 be the same as TO, which
4539 should be a bitfield. */
4540 if (!op0stmt
4541 || !is_gimple_assign (op0stmt)
4542 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4543 return false;
4544 op0 = gimple_assign_rhs1 (op0stmt);
4545 }
4546
4547 op1 = gimple_assign_rhs2 (srcstmt);
79367e65 4548
4549 if (!operand_equal_p (to, op0, 0))
4550 return false;
4551
4552 if (MEM_P (str_rtx))
4553 {
4554 unsigned HOST_WIDE_INT offset1;
4555
4556 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4557 str_mode = word_mode;
4558 str_mode = get_best_mode (bitsize, bitpos,
4bb60ec7 4559 bitregion_start, bitregion_end,
79367e65 4560 MEM_ALIGN (str_rtx), str_mode, 0);
4561 if (str_mode == VOIDmode)
4562 return false;
4563 str_bitsize = GET_MODE_BITSIZE (str_mode);
4564
4565 offset1 = bitpos;
4566 bitpos %= str_bitsize;
4567 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4568 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4569 }
4570 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4571 return false;
4572
4573 /* If the bit field covers the whole REG/MEM, store_field
4574 will likely generate better code. */
4575 if (bitsize >= str_bitsize)
4576 return false;
4577
4578 /* We can't handle fields split across multiple entities. */
4579 if (bitpos + bitsize > str_bitsize)
4580 return false;
4581
4582 if (BYTES_BIG_ENDIAN)
4583 bitpos = str_bitsize - bitpos - bitsize;
4584
c4532f22 4585 switch (code)
79367e65 4586 {
4587 case PLUS_EXPR:
4588 case MINUS_EXPR:
4589 /* For now, just optimize the case of the topmost bitfield
4590 where we don't need to do any masking and also
4591 1 bit bitfields where xor can be used.
4592 We might win by one instruction for the other bitfields
4593 too if insv/extv instructions aren't used, so that
4594 can be added later. */
4595 if (bitpos + bitsize != str_bitsize
4596 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4597 break;
4598
1db6d067 4599 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
79367e65 4600 value = convert_modes (str_mode,
4601 TYPE_MODE (TREE_TYPE (op1)), value,
4602 TYPE_UNSIGNED (TREE_TYPE (op1)));
4603
4604 /* We may be accessing data outside the field, which means
4605 we can alias adjacent data. */
4606 if (MEM_P (str_rtx))
4607 {
4608 str_rtx = shallow_copy_rtx (str_rtx);
4609 set_mem_alias_set (str_rtx, 0);
4610 set_mem_expr (str_rtx, 0);
4611 }
4612
c4532f22 4613 binop = code == PLUS_EXPR ? add_optab : sub_optab;
79367e65 4614 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4615 {
4616 value = expand_and (str_mode, value, const1_rtx, NULL);
4617 binop = xor_optab;
4618 }
40715742 4619 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
79367e65 4620 result = expand_binop (str_mode, binop, str_rtx,
4621 value, str_rtx, 1, OPTAB_WIDEN);
4622 if (result != str_rtx)
4623 emit_move_insn (str_rtx, result);
4624 return true;
4625
0ebe5db7 4626 case BIT_IOR_EXPR:
4627 case BIT_XOR_EXPR:
4628 if (TREE_CODE (op1) != INTEGER_CST)
4629 break;
40715742 4630 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4631 value = convert_modes (str_mode,
0ebe5db7 4632 TYPE_MODE (TREE_TYPE (op1)), value,
4633 TYPE_UNSIGNED (TREE_TYPE (op1)));
4634
4635 /* We may be accessing data outside the field, which means
4636 we can alias adjacent data. */
4637 if (MEM_P (str_rtx))
4638 {
4639 str_rtx = shallow_copy_rtx (str_rtx);
4640 set_mem_alias_set (str_rtx, 0);
4641 set_mem_expr (str_rtx, 0);
4642 }
4643
c4532f22 4644 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
40715742 4645 if (bitpos + bitsize != str_bitsize)
0ebe5db7 4646 {
0359f9f5 4647 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4648 str_mode);
40715742 4649 value = expand_and (str_mode, value, mask, NULL_RTX);
0ebe5db7 4650 }
40715742 4651 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4652 result = expand_binop (str_mode, binop, str_rtx,
0ebe5db7 4653 value, str_rtx, 1, OPTAB_WIDEN);
4654 if (result != str_rtx)
4655 emit_move_insn (str_rtx, result);
4656 return true;
4657
79367e65 4658 default:
4659 break;
4660 }
4661
4662 return false;
4663}
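
/* Example (added commentary; not part of the original expr.c): for

     struct { unsigned f : 1; } *p;  ...  p->f ^= 1;

   the BIT_XOR_EXPR case above emits a single xor of the containing
   word with a shifted one-bit mask instead of extracting, modifying
   and re-inserting the field.  The function gives up whenever the
   field spans more than one underlying word or the source is not a
   matching binary GIMPLE assignment.  */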
4664
4bb60ec7 4665/* In the C++ memory model, consecutive bit fields in a structure are
4666 considered one memory location.
4667
3cef948a 4668 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
8d8a34f9 4669 returns the bit range of consecutive bits in which this COMPONENT_REF
3cef948a 4670 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4671 and *OFFSET may be adjusted in the process.
4672
4673 If the access does not need to be restricted, 0 is returned in both
8d8a34f9 4674 *BITSTART and *BITEND. */
4bb60ec7 4675
4676static void
4677get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4678 unsigned HOST_WIDE_INT *bitend,
8d8a34f9 4679 tree exp,
3cef948a 4680 HOST_WIDE_INT *bitpos,
4681 tree *offset)
4bb60ec7 4682{
3cef948a 4683 HOST_WIDE_INT bitoffset;
fa42e1a4 4684 tree field, repr;
4bb60ec7 4685
4686 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4687
8d8a34f9 4688 field = TREE_OPERAND (exp, 1);
4689 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4690 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4691 need to limit the range we can access. */
4692 if (!repr)
4bb60ec7 4693 {
4694 *bitstart = *bitend = 0;
4695 return;
4696 }
4697
73041e9b 4698 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4699 part of a larger bit field, then the representative does not serve any
4700 useful purpose. This can occur in Ada. */
4701 if (handled_component_p (TREE_OPERAND (exp, 0)))
4702 {
3754d046 4703 machine_mode rmode;
73041e9b 4704 HOST_WIDE_INT rbitsize, rbitpos;
4705 tree roffset;
4706 int unsignedp;
4707 int volatilep = 0;
4708 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4709 &roffset, &rmode, &unsignedp, &volatilep, false);
4710 if ((rbitpos % BITS_PER_UNIT) != 0)
4711 {
4712 *bitstart = *bitend = 0;
4713 return;
4714 }
4715 }
4716
8d8a34f9 4717 /* Compute the adjustment to bitpos from the offset of the field
fa42e1a4 4718 relative to the representative. DECL_FIELD_OFFSET of field and
4719 repr are the same by construction if they are not constants,
4720 see finish_bitfield_layout. */
e913b5cd 4721 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4722 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4723 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4724 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
fa42e1a4 4725 else
4726 bitoffset = 0;
e913b5cd 4727 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4728 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4bb60ec7 4729
3cef948a 4730 /* If the adjustment is larger than bitpos, we would have a negative bit
5efffd8e 4731 position for the lower bound and this may wreak havoc later. Adjust
4732 offset and bitpos to make the lower bound non-negative in that case. */
3cef948a 4733 if (bitoffset > *bitpos)
4734 {
4735 HOST_WIDE_INT adjust = bitoffset - *bitpos;
3cef948a 4736 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
3cef948a 4737
4738 *bitpos += adjust;
5efffd8e 4739 if (*offset == NULL_TREE)
4740 *offset = size_int (-adjust / BITS_PER_UNIT);
4741 else
4742 *offset
4743 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
3cef948a 4744 *bitstart = 0;
4745 }
4746 else
4747 *bitstart = *bitpos - bitoffset;
4748
e913b5cd 4749 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4bb60ec7 4750}
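
/* Example (added commentary; not part of the original expr.c): given

     struct S { int a : 3; int b : 5; char c; };

   the bit-fields a and b share one DECL_BIT_FIELD_REPRESENTATIVE, so a
   store to s.b is given a bit range confined to that representative.
   The member c is a distinct memory location under the C++11 memory
   model and must not be touched by the store.  */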
79367e65 4751
6d1013f7 4752/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4753 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4754 DECL_RTL was not set yet, return NORTL. */
4755
4756static inline bool
4757addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4758{
4759 if (TREE_CODE (addr) != ADDR_EXPR)
4760 return false;
4761
4762 tree base = TREE_OPERAND (addr, 0);
4763
4764 if (!DECL_P (base)
4765 || TREE_ADDRESSABLE (base)
4766 || DECL_MODE (base) == BLKmode)
4767 return false;
4768
4769 if (!DECL_RTL_SET_P (base))
4770 return nortl;
4771
4772 return (!MEM_P (DECL_RTL (base)));
4773}
4774
a598af2a 4775/* Returns true if the MEM_REF REF refers to an object that does not
4776 reside in memory and has non-BLKmode. */
4777
6d1013f7 4778static inline bool
a598af2a 4779mem_ref_refers_to_non_mem_p (tree ref)
4780{
4781 tree base = TREE_OPERAND (ref, 0);
6d1013f7 4782 return addr_expr_of_non_mem_decl_p_1 (base, false);
4783}
4784
5b5037b3 4785/* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4786 is true, try generating a nontemporal store. */
10f307d9 4787
5d3c0894 4788void
5b5037b3 4789expand_assignment (tree to, tree from, bool nontemporal)
10f307d9 4790{
19cb6b50 4791 rtx to_rtx = 0;
10f307d9 4792 rtx result;
3754d046 4793 machine_mode mode;
56cf6489 4794 unsigned int align;
8786db1e 4795 enum insn_code icode;
10f307d9 4796
4797 /* Don't crash if the lhs of the assignment was erroneous. */
10f307d9 4798 if (TREE_CODE (to) == ERROR_MARK)
9282409c 4799 {
1084097d 4800 expand_normal (from);
5d3c0894 4801 return;
9282409c 4802 }
10f307d9 4803
8f3e551a 4804 /* Optimize away no-op moves without side-effects. */
4805 if (operand_equal_p (to, from, 0))
4806 return;
4807
a598af2a 4808 /* Handle misaligned stores. */
5d9de213 4809 mode = TYPE_MODE (TREE_TYPE (to));
4810 if ((TREE_CODE (to) == MEM_REF
4811 || TREE_CODE (to) == TARGET_MEM_REF)
4812 && mode != BLKmode
55e42d78 4813 && !mem_ref_refers_to_non_mem_p (to)
3482bf13 4814 && ((align = get_object_alignment (to))
56cf6489 4815 < GET_MODE_ALIGNMENT (mode))
55e42d78 4816 && (((icode = optab_handler (movmisalign_optab, mode))
4817 != CODE_FOR_nothing)
4818 || SLOW_UNALIGNED_ACCESS (mode, align)))
5d9de213 4819 {
884b03c9 4820 rtx reg, mem;
5d9de213 4821
4822 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4823 reg = force_not_mem (reg);
884b03c9 4824 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5d9de213 4825
55e42d78 4826 if (icode != CODE_FOR_nothing)
4827 {
884b03c9 4828 struct expand_operand ops[2];
4829
55e42d78 4830 create_fixed_operand (&ops[0], mem);
4831 create_input_operand (&ops[1], reg, mode);
4832 /* The movmisalign<mode> pattern cannot fail, else the assignment
4833 would silently be omitted. */
4834 expand_insn (icode, 2, ops);
4835 }
4836 else
1603adf9 4837 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
5d9de213 4838 return;
4839 }
4840
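  /* Editorial illustration (not part of expr.c): one common source of the
     misaligned MEM_REF stores handled just above is a folded memcpy, e.g.

	 void set (char *p, int v) { __builtin_memcpy (p, &v, sizeof v); }

     which the gimple folder typically rewrites into a single SImode MEM_REF
     store with byte alignment.  On a strict-alignment target that provides
     movmisalign<mode>, that store is expanded through the pattern above;
     otherwise it falls back to store_bit_field.  Whether the folding happens
     depends on the front end and optimization level, so treat this only as a
     plausible example.  */
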
10f307d9 4841 /* Assignment of a structure component needs special treatment
4842 if the structure component's rtx is not simply a MEM.
e3a8913c 4843 Assignment of an array element at a constant index, and assignment of
4844 an array element in an unaligned packed structure field, has the same
a598af2a 4845 problem. Same for (partially) storing into a non-memory object. */
79367e65 4846 if (handled_component_p (to)
182cf5a9 4847 || (TREE_CODE (to) == MEM_REF
a598af2a 4848 && mem_ref_refers_to_non_mem_p (to))
2d55cbd9 4849 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
10f307d9 4850 {
3754d046 4851 machine_mode mode1;
02e7a332 4852 HOST_WIDE_INT bitsize, bitpos;
4bb60ec7 4853 unsigned HOST_WIDE_INT bitregion_start = 0;
4854 unsigned HOST_WIDE_INT bitregion_end = 0;
954bdcb1 4855 tree offset;
10f307d9 4856 int unsignedp;
4857 int volatilep = 0;
88ac3f7f 4858 tree tem;
4859
4860 push_temp_slots ();
7fce34be 4861 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
e7e9416e 4862 &unsignedp, &volatilep, true);
10f307d9 4863
5efffd8e 4864 /* Make sure bitpos is not negative, it can wreak havoc later. */
4865 if (bitpos < 0)
4866 {
4867 gcc_assert (offset == NULL_TREE);
4868 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4869 ? 3 : exact_log2 (BITS_PER_UNIT)));
4870 bitpos &= BITS_PER_UNIT - 1;
4871 }
4872
4bb60ec7 4873 if (TREE_CODE (to) == COMPONENT_REF
4874 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
3cef948a 4875 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
3070a799 4876 /* The C++ memory model naturally applies to byte-aligned fields.
4877 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4878 BITSIZE are not byte-aligned, there is no need to limit the range
4879 we can access. This can occur with packed structures in Ada. */
4880 else if (bitsize > 0
4881 && bitsize % BITS_PER_UNIT == 0
4882 && bitpos % BITS_PER_UNIT == 0)
4883 {
4884 bitregion_start = bitpos;
4885 bitregion_end = bitpos + bitsize - 1;
4886 }
4bb60ec7 4887
7b9e6cc3 4888 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
a689a61a 4889
61a1f9de 4890 /* If the field has a mode, we want to access it in the
1795103a 4891 field's mode, not the computed mode.
4892 If a MEM has VOIDmode (external with incomplete type),
4893 use BLKmode for it instead. */
4894 if (MEM_P (to_rtx))
4895 {
61a1f9de 4896 if (mode1 != VOIDmode)
1795103a 4897 to_rtx = adjust_address (to_rtx, mode1, 0);
4898 else if (GET_MODE (to_rtx) == VOIDmode)
4899 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4900 }
a420d927 4901
954bdcb1 4902 if (offset != 0)
4903 {
3754d046 4904 machine_mode address_mode;
c22de3f0 4905 rtx offset_rtx;
954bdcb1 4906
c22de3f0 4907 if (!MEM_P (to_rtx))
4908 {
4909 /* We can get constant negative offsets into arrays with broken
4910 user code. Translate this to a trap instead of ICEing. */
4911 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4912 expand_builtin_trap ();
4913 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4914 }
33ef2f52 4915
c22de3f0 4916 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
87cf5753 4917 address_mode = get_address_mode (to_rtx);
98155838 4918 if (GET_MODE (offset_rtx) != address_mode)
2ff88218 4919 {
4920 /* We cannot be sure that the RTL in offset_rtx is valid outside
4921 of a memory address context, so force it into a register
4922 before attempting to convert it to the desired mode. */
4923 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4924 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4925 }
33ef2f52 4926
dbe2840a 4927 /* If we have an expression in OFFSET_RTX and a non-zero
4928 byte offset in BITPOS, adding the byte offset before the
4929 OFFSET_RTX results in better intermediate code, which makes
4930 later rtl optimization passes perform better.
4931
4932 We prefer intermediate code like this:
4933
4934 r124:DI=r123:DI+0x18
4935 [r124:DI]=r121:DI
4936
4937 ... instead of ...
4938
4939 r124:DI=r123:DI+0x10
4940 [r124:DI+0x8]=r121:DI
4941
4942 This is only done for aligned data values, as these can
4943 be expected to result in single move instructions. */
4944 if (mode1 != VOIDmode
4945 && bitpos != 0
2b96c5f6 4946 && bitsize > 0
fa56dc1d 4947 && (bitpos % bitsize) == 0
25d55d72 4948 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
dbe2840a 4949 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
25d55d72 4950 {
fac6aae6 4951 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
438167eb 4952 bitregion_start = 0;
4953 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4954 bitregion_end -= bitpos;
25d55d72 4955 bitpos = 0;
4956 }
4957
fcdc122e 4958 to_rtx = offset_address (to_rtx, offset_rtx,
252d0e4d 4959 highest_pow2_factor_for_target (to,
4960 offset));
954bdcb1 4961 }
7014838c 4962
d8d9af50 4963 /* No action is needed if the target is not a memory and the field
4964 lies completely outside that target. This can occur if the source
4965 code contains an out-of-bounds access to a small array. */
4966 if (!MEM_P (to_rtx)
4967 && GET_MODE (to_rtx) != BLKmode
4968 && (unsigned HOST_WIDE_INT) bitpos
995b44f5 4969 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
d8d9af50 4970 {
4971 expand_normal (from);
4972 result = NULL;
4973 }
79367e65 4974 /* Handle expand_expr of a complex value returning a CONCAT. */
d8d9af50 4975 else if (GET_CODE (to_rtx) == CONCAT)
2b96c5f6 4976 {
3a175160 4977 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4978 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4979 && bitpos == 0
4980 && bitsize == mode_bitsize)
4981 result = store_expr (from, to_rtx, false, nontemporal);
4982 else if (bitsize == mode_bitsize / 2
4983 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4984 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4985 nontemporal);
2cd0cb08 4986 else if (bitpos + bitsize <= mode_bitsize / 2)
3a175160 4987 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4bb60ec7 4988 bitregion_start, bitregion_end,
f955ca51 4989 mode1, from,
3a175160 4990 get_alias_set (to), nontemporal);
2cd0cb08 4991 else if (bitpos >= mode_bitsize / 2)
3a175160 4992 result = store_field (XEXP (to_rtx, 1), bitsize,
4bb60ec7 4993 bitpos - mode_bitsize / 2,
4994 bitregion_start, bitregion_end,
4995 mode1, from,
f955ca51 4996 get_alias_set (to), nontemporal);
3a175160 4997 else if (bitpos == 0 && bitsize == mode_bitsize)
020823de 4998 {
3a175160 4999 rtx from_rtx;
5000 result = expand_normal (from);
5001 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
5002 TYPE_MODE (TREE_TYPE (from)), 0);
5003 emit_move_insn (XEXP (to_rtx, 0),
5004 read_complex_part (from_rtx, false));
5005 emit_move_insn (XEXP (to_rtx, 1),
5006 read_complex_part (from_rtx, true));
020823de 5007 }
5008 else
5009 {
3a175160 5010 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
2cd0cb08 5011 GET_MODE_SIZE (GET_MODE (to_rtx)));
3a175160 5012 write_complex_part (temp, XEXP (to_rtx, 0), false);
5013 write_complex_part (temp, XEXP (to_rtx, 1), true);
4bb60ec7 5014 result = store_field (temp, bitsize, bitpos,
5015 bitregion_start, bitregion_end,
5016 mode1, from,
f955ca51 5017 get_alias_set (to), nontemporal);
3a175160 5018 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5019 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
020823de 5020 }
10f307d9 5021 }
79367e65 5022 else
0717ec39 5023 {
79367e65 5024 if (MEM_P (to_rtx))
d4ca42d3 5025 {
79367e65 5026 /* If the field is at offset zero, we could have been given the
5027 DECL_RTX of the parent struct. Don't munge it. */
5028 to_rtx = shallow_copy_rtx (to_rtx);
79367e65 5029 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
79367e65 5030 if (volatilep)
5031 MEM_VOLATILE_P (to_rtx) = 1;
0717ec39 5032 }
9c5f26b0 5033
4bb60ec7 5034 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5035 bitregion_start, bitregion_end,
5036 mode1,
79367e65 5037 to_rtx, to, from))
5038 result = NULL;
5039 else
4bb60ec7 5040 result = store_field (to_rtx, bitsize, bitpos,
5041 bitregion_start, bitregion_end,
5042 mode1, from,
f955ca51 5043 get_alias_set (to), nontemporal);
0717ec39 5044 }
5045
79367e65 5046 if (result)
5047 preserve_temp_slots (result);
2b96c5f6 5048 pop_temp_slots ();
5d3c0894 5049 return;
10f307d9 5050 }
5051
a2e044a5 5052 /* If the rhs is a function call and its value is not an aggregate,
5053 call the function before we start to compute the lhs.
5054 This is needed for correct code for cases such as
5055 val = setjmp (buf) on machines where reference to val
e767499e 5056 requires loading up part of an address in a separate insn.
5057
16a8193d 5058 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5059 since it might be a promoted variable where the zero- or sign- extension
5060 needs to be done. Handling this in the normal way is safe because no
a8dd994c 5061 computation is done before the call. The same is true for SSA names. */
45550790 5062 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35c122b 5063 && COMPLETE_TYPE_P (TREE_TYPE (from))
61b44857 5064 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
ee5ab2d1 5065 && ! (((TREE_CODE (to) == VAR_DECL
5066 || TREE_CODE (to) == PARM_DECL
5067 || TREE_CODE (to) == RESULT_DECL)
a8dd994c 5068 && REG_P (DECL_RTL (to)))
5069 || TREE_CODE (to) == SSA_NAME))
a2e044a5 5070 {
88ac3f7f 5071 rtx value;
058a1b7a 5072 rtx bounds;
88ac3f7f 5073
5074 push_temp_slots ();
8ec3c5c2 5075 value = expand_normal (from);
058a1b7a 5076
5077 /* Split value and bounds to store them separately. */
5078 chkp_split_slot (value, &value, &bounds);
5079
a2e044a5 5080 if (to_rtx == 0)
8a06f2d4 5081 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
ac263f88 5082
ce739127 5083 /* Handle calls that return values in multiple non-contiguous locations.
5084 The Irix 6 ABI has examples of this. */
5085 if (GET_CODE (to_rtx) == PARALLEL)
2d0fd66d 5086 {
5087 if (GET_CODE (value) == PARALLEL)
5088 emit_group_move (to_rtx, value);
5089 else
5090 emit_group_load (to_rtx, value, TREE_TYPE (from),
5091 int_size_in_bytes (TREE_TYPE (from)));
5092 }
5093 else if (GET_CODE (value) == PARALLEL)
5094 emit_group_store (to_rtx, value, TREE_TYPE (from),
5095 int_size_in_bytes (TREE_TYPE (from)));
ce739127 5096 else if (GET_MODE (to_rtx) == BLKmode)
7e91b548 5097 {
f955ca51 5098 /* Handle calls that return BLKmode values in registers. */
7e91b548 5099 if (REG_P (value))
5100 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5101 else
5102 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5103 }
ac263f88 5104 else
5471b3be 5105 {
85d654dd 5106 if (POINTER_TYPE_P (TREE_TYPE (to)))
98155838 5107 value = convert_memory_address_addr_space
5108 (GET_MODE (to_rtx), value,
5109 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5110
5471b3be 5111 emit_move_insn (to_rtx, value);
5112 }
058a1b7a 5113
5114 /* Store bounds if required. */
5115 if (bounds
5116 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5117 {
5118 gcc_assert (MEM_P (to_rtx));
5119 chkp_emit_bounds_store (bounds, value, to_rtx);
5120 }
5121
a2e044a5 5122 preserve_temp_slots (to_rtx);
88ac3f7f 5123 pop_temp_slots ();
5d3c0894 5124 return;
a2e044a5 5125 }
5126
a598af2a 5127 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5128 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
10f307d9 5129
addbe7ac 5130 /* Don't move directly into a return register. */
155b05dc 5131 if (TREE_CODE (to) == RESULT_DECL
8ad4c111 5132 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
addbe7ac 5133 {
88ac3f7f 5134 rtx temp;
5135
5136 push_temp_slots ();
f66cd30d 5137
5138 /* If the source is itself a return value, it still is in a pseudo at
5139 this point so we can move it back to the return register directly. */
5140 if (REG_P (to_rtx)
5141 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5142 && TREE_CODE (from) != CALL_EXPR)
ee5ab2d1 5143 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5144 else
5145 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
155b05dc 5146
2d0fd66d 5147 /* Handle calls that return values in multiple non-contiguous locations.
5148 The Irix 6 ABI has examples of this. */
155b05dc 5149 if (GET_CODE (to_rtx) == PARALLEL)
2d0fd66d 5150 {
5151 if (GET_CODE (temp) == PARALLEL)
5152 emit_group_move (to_rtx, temp);
5153 else
5154 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5155 int_size_in_bytes (TREE_TYPE (from)));
5156 }
ee5ab2d1 5157 else if (temp)
155b05dc 5158 emit_move_insn (to_rtx, temp);
5159
addbe7ac 5160 preserve_temp_slots (to_rtx);
88ac3f7f 5161 pop_temp_slots ();
5d3c0894 5162 return;
addbe7ac 5163 }
5164
10f307d9 5165 /* In case we are returning the contents of an object which overlaps
5166 the place the value is being stored, use a safe function when copying
5167 a value through a pointer into a structure value return block. */
865c8a7e 5168 if (TREE_CODE (to) == RESULT_DECL
5169 && TREE_CODE (from) == INDIRECT_REF
bd1a81f7 5170 && ADDR_SPACE_GENERIC_P
865c8a7e 5171 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5172 && refs_may_alias_p (to, from)
18d50ae6 5173 && cfun->returns_struct
5174 && !cfun->returns_pcc_struct)
10f307d9 5175 {
88ac3f7f 5176 rtx from_rtx, size;
5177
5178 push_temp_slots ();
eaf7767e 5179 size = expr_size (from);
8ec3c5c2 5180 from_rtx = expand_normal (from);
10f307d9 5181
f896c932 5182 emit_library_call (memmove_libfunc, LCT_NORMAL,
5183 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5184 XEXP (from_rtx, 0), Pmode,
5185 convert_to_mode (TYPE_MODE (sizetype),
5186 size, TYPE_UNSIGNED (sizetype)),
5187 TYPE_MODE (sizetype));
10f307d9 5188
5189 preserve_temp_slots (to_rtx);
88ac3f7f 5190 pop_temp_slots ();
5d3c0894 5191 return;
10f307d9 5192 }
5193
5194 /* Compute FROM and store the value in the rtx we got. */
5195
88ac3f7f 5196 push_temp_slots ();
058a1b7a 5197 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
10f307d9 5198 preserve_temp_slots (result);
88ac3f7f 5199 pop_temp_slots ();
5d3c0894 5200 return;
10f307d9 5201}
5202
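/* Editorial illustration (not part of expr.c): the CONCAT case inside
   expand_assignment above is what a write to one half of a complex value
   goes through when the complex object lives in a register pair.  For

       _Complex double c;
       __real__ c = 1.0;

   the lhs expands to a CONCAT of two DFmode pseudos; get_inner_reference
   gives bitpos 0 and a bitsize of half the mode size, so the store is routed
   to XEXP (to_rtx, 0) (and to XEXP (to_rtx, 1) for __imag__).  This is a
   reading of the code above, not a guarantee about every target.  */
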
5b5037b3 5203/* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
5204 succeeded, false otherwise. */
5205
16c9337c 5206bool
5b5037b3 5207emit_storent_insn (rtx to, rtx from)
5208{
8786db1e 5209 struct expand_operand ops[2];
3754d046 5210 machine_mode mode = GET_MODE (to);
d6bf3b14 5211 enum insn_code code = optab_handler (storent_optab, mode);
5b5037b3 5212
5213 if (code == CODE_FOR_nothing)
5214 return false;
5215
8786db1e 5216 create_fixed_operand (&ops[0], to);
5217 create_input_operand (&ops[1], from, mode);
5218 return maybe_expand_insn (code, 2, ops);
5b5037b3 5219}
5220
10f307d9 5221/* Generate code for computing expression EXP,
5222 and storing the value into TARGET.
10f307d9 5223
9282409c 5224 If the mode is BLKmode then we may return TARGET itself.
5225 It turns out that in BLKmode it doesn't cause a problem,
5226 because C has no operators that could combine two different
5227 assignments into the same BLKmode object with different values
5228 with no sequence point. Will other languages need this to
5229 be more thorough?
5230
c0f85e83 5231 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5b5037b3 5232 stack, and block moves may need to be treated specially.
48e1416a 5233
058a1b7a 5234 If NONTEMPORAL is true, try using a nontemporal store instruction.
5235
5236 If BTARGET is not NULL then computed bounds of EXP are
5237 associated with BTARGET. */
10f307d9 5238
5239rtx
058a1b7a 5240store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5241 bool nontemporal, tree btarget)
10f307d9 5242{
19cb6b50 5243 rtx temp;
60ffaf4d 5244 rtx alt_rtl = NULL_RTX;
ed4d69dc 5245 location_t loc = curr_insn_location ();
10f307d9 5246
824638f9 5247 if (VOID_TYPE_P (TREE_TYPE (exp)))
5248 {
5249 /* C++ can generate ?: expressions with a throw expression in one
5250 branch and an rvalue in the other. Here, we resolve attempts to
917bbcab 5251 store the throw expression's nonexistent result. */
c0f85e83 5252 gcc_assert (!call_param_p);
1db6d067 5253 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
824638f9 5254 return NULL_RTX;
5255 }
10f307d9 5256 if (TREE_CODE (exp) == COMPOUND_EXPR)
5257 {
5258 /* Perform first part of compound expression, then assign from second
5259 part. */
a35a63ff 5260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
c0f85e83 5261 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
058a1b7a 5262 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5263 call_param_p, nontemporal, btarget);
10f307d9 5264 }
5265 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5266 {
5267 /* For conditional expression, get safe form of the target. Then
5268 test the condition, doing the appropriate assignment on either
5269 side. This avoids the creation of unnecessary temporaries.
5270 For non-BLKmode, it is more efficient not to do this. */
5271
1d277a67 5272 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
10f307d9 5273
d07f1b1f 5274 do_pending_stack_adjust ();
10f307d9 5275 NO_DEFER_POP;
79ab74cc 5276 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
058a1b7a 5277 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5278 nontemporal, btarget);
10f307d9 5279 emit_jump_insn (gen_jump (lab2));
5280 emit_barrier ();
5281 emit_label (lab1);
058a1b7a 5282 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5283 nontemporal, btarget);
10f307d9 5284 emit_label (lab2);
5285 OK_DEFER_POP;
9012f57d 5286
3f2a8027 5287 return NULL_RTX;
bb11bacb 5288 }
acfb31e5 5289 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
edc2a478 5290 /* If this is a scalar in a register that is stored in a wider mode
acfb31e5 5291 than the declared mode, compute the result into its declared mode
5292 and then convert to the wider mode. Our value is the computed
5293 expression. */
5294 {
d2422fc2 5295 rtx inner_target = 0;
5296
3f2a8027 5297 /* We can do the conversion inside EXP, which will often result
5298 in some optimizations. Do the conversion in two steps: first
5299 change the signedness, if needed, then the extension. But don't
5300 do this if the type of EXP is a subtype of something else
5301 since then the conversion might involve more than just
5302 converting modes. */
5303 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
aec30911 5304 && TREE_TYPE (TREE_TYPE (exp)) == 0
dcfc697f 5305 && GET_MODE_PRECISION (GET_MODE (target))
5306 == TYPE_PRECISION (TREE_TYPE (exp)))
8d426db9 5307 {
e8629f9e 5308 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5309 TYPE_UNSIGNED (TREE_TYPE (exp))))
a4521f7e 5310 {
5311 /* Some types, e.g. Fortran's logical*4, won't have a signed
5312 version, so use the mode instead. */
5313 tree ntype
11773141 5314 = (signed_or_unsigned_type_for
e8629f9e 5315 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
a4521f7e 5316 if (ntype == NULL)
5317 ntype = lang_hooks.types.type_for_mode
5318 (TYPE_MODE (TREE_TYPE (exp)),
e8629f9e 5319 SUBREG_PROMOTED_SIGN (target));
a4521f7e 5320
389dd41b 5321 exp = fold_convert_loc (loc, ntype, exp);
a4521f7e 5322 }
8d426db9 5323
389dd41b 5324 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5325 (GET_MODE (SUBREG_REG (target)),
e8629f9e 5326 SUBREG_PROMOTED_SIGN (target)),
389dd41b 5327 exp);
d2422fc2 5328
5329 inner_target = SUBREG_REG (target);
8d426db9 5330 }
fa56dc1d 5331
a35a63ff 5332 temp = expand_expr (exp, inner_target, VOIDmode,
c0f85e83 5333 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
ceefa980 5334
058a1b7a 5335 /* Handle bounds returned by call. */
5336 if (TREE_CODE (exp) == CALL_EXPR)
5337 {
5338 rtx bounds;
5339 chkp_split_slot (temp, &temp, &bounds);
5340 if (bounds && btarget)
5341 {
5342 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5343 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5344 chkp_set_rtl_bounds (btarget, tmp);
5345 }
5346 }
5347
ceefa980 5348 /* If TEMP is a VOIDmode constant, use convert_modes to make
5349 sure that we properly convert it. */
5350 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
c3ba908e 5351 {
5352 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
e8629f9e 5353 temp, SUBREG_PROMOTED_SIGN (target));
c3ba908e 5354 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5355 GET_MODE (target), temp,
e8629f9e 5356 SUBREG_PROMOTED_SIGN (target));
c3ba908e 5357 }
ceefa980 5358
acfb31e5 5359 convert_move (SUBREG_REG (target), temp,
e8629f9e 5360 SUBREG_PROMOTED_SIGN (target));
28ad8d33 5361
3f2a8027 5362 return NULL_RTX;
acfb31e5 5363 }
b412eb5b 5364 else if ((TREE_CODE (exp) == STRING_CST
5365 || (TREE_CODE (exp) == MEM_REF
5366 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5367 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5368 == STRING_CST
5369 && integer_zerop (TREE_OPERAND (exp, 1))))
09879952 5370 && !nontemporal && !call_param_p
b412eb5b 5371 && MEM_P (target))
09879952 5372 {
5373 /* Optimize initialization of an array with a STRING_CST. */
5374 HOST_WIDE_INT exp_len, str_copy_len;
5375 rtx dest_mem;
b412eb5b 5376 tree str = TREE_CODE (exp) == STRING_CST
5377 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
09879952 5378
5379 exp_len = int_expr_size (exp);
5380 if (exp_len <= 0)
5381 goto normal_expr;
5382
b412eb5b 5383 if (TREE_STRING_LENGTH (str) <= 0)
182cf5a9 5384 goto normal_expr;
5385
5386 str_copy_len = strlen (TREE_STRING_POINTER (str));
5387 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5388 goto normal_expr;
5389
5390 str_copy_len = TREE_STRING_LENGTH (str);
b412eb5b 5391 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5392 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
182cf5a9 5393 {
5394 str_copy_len += STORE_MAX_PIECES - 1;
5395 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5396 }
5397 str_copy_len = MIN (str_copy_len, exp_len);
5398 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
b412eb5b 5399 CONST_CAST (char *, TREE_STRING_POINTER (str)),
182cf5a9 5400 MEM_ALIGN (target), false))
5401 goto normal_expr;
5402
5403 dest_mem = target;
5404
5405 dest_mem = store_by_pieces (dest_mem,
5406 str_copy_len, builtin_strncpy_read_str,
b412eb5b 5407 CONST_CAST (char *,
5408 TREE_STRING_POINTER (str)),
182cf5a9 5409 MEM_ALIGN (target), false,
5410 exp_len > str_copy_len ? 1 : 0);
5411 if (exp_len > str_copy_len)
5412 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5413 GEN_INT (exp_len - str_copy_len),
5414 BLOCK_OP_NORMAL);
5415 return NULL_RTX;
5416 }
10f307d9 5417 else
5418 {
5b5037b3 5419 rtx tmp_target;
5420
09879952 5421 normal_expr:
5b5037b3 5422 /* If we want to use a nontemporal store, force the value to
5423 register first. */
5424 tmp_target = nontemporal ? NULL_RTX : target;
5425 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
c0f85e83 5426 (call_param_p
60ffaf4d 5427 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
a12f023f 5428 &alt_rtl, false);
058a1b7a 5429
5430 /* Handle bounds returned by call. */
5431 if (TREE_CODE (exp) == CALL_EXPR)
5432 {
5433 rtx bounds;
5434 chkp_split_slot (temp, &temp, &bounds);
5435 if (bounds && btarget)
5436 {
5437 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5438 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5439 chkp_set_rtl_bounds (btarget, tmp);
5440 }
5441 }
10f307d9 5442 }
5443
c4050ce7 5444 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5445 the same as that of TARGET, adjust the constant. This is needed, for
5446 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5447 only a word-sized value. */
ceefa980 5448 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
43769aba 5449 && TREE_CODE (exp) != ERROR_MARK
ceefa980 5450 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5451 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
78a8ed03 5452 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
ceefa980 5453
10f307d9 5454 /* If value was not generated in the target, store it there.
c0d93299 5455 Convert the value to TARGET's type first if necessary and emit the
5456 pending incrementations that have been queued when expanding EXP.
5457 Note that we cannot emit the whole queue blindly because this will
5458 effectively disable the POST_INC optimization later.
5459
8a06f2d4 5460 If TEMP and TARGET compare equal according to rtx_equal_p, but
14e396bb 5461 one or both of them are volatile memory refs, we have to distinguish
5462 two cases:
5463 - expand_expr has used TARGET. In this case, we must not generate
5464 another copy. This can be detected by TARGET being equal according
5465 to == .
5466 - expand_expr has not used TARGET - that means that the source just
5467 happens to have the same RTX form. Since temp will have been created
5468 by expand_expr, it will compare unequal according to == .
5469 We must generate a copy in this case, to reach the correct number
5470 of volatile memory references. */
10f307d9 5471
b1ba8c8b 5472 if ((! rtx_equal_p (temp, target)
14e396bb 5473 || (temp != target && (side_effects_p (temp)
5474 || side_effects_p (target))))
afadb0ab 5475 && TREE_CODE (exp) != ERROR_MARK
72a64688 5476 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5477 but TARGET is not valid memory reference, TEMP will differ
5478 from TARGET although it is really the same location. */
1e20370f 5479 && !(alt_rtl
5480 && rtx_equal_p (alt_rtl, target)
5481 && !side_effects_p (alt_rtl)
5482 && !side_effects_p (target))
89f18f73 5483 /* If there's nothing to copy, don't bother. Don't call
5484 expr_size unless necessary, because for some front ends (C++)
5485 the expr_size hook must not be given objects that are not
5486 supposed to be bit-copied or bit-initialized. */
d18d957a 5487 && expr_size (exp) != const0_rtx)
10f307d9 5488 {
7e91b548 5489 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
10f307d9 5490 {
7e91b548 5491 if (GET_MODE (target) == BLKmode)
5492 {
f955ca51 5493 /* Handle calls that return BLKmode values in registers. */
60797203 5494 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5495 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
7e91b548 5496 else
60797203 5497 store_bit_field (target,
5498 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5499 0, 0, 0, GET_MODE (temp), temp);
7e91b548 5500 }
10f307d9 5501 else
7e91b548 5502 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
10f307d9 5503 }
5504
5505 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5506 {
18279aee 5507 /* Handle copying a string constant into an array. The string
5508 constant may be shorter than the array. So copy just the string's
5509 actual length, and clear the rest. First get the size of the data
5510 type of the string, which is actually the size of the target. */
5511 rtx size = expr_size (exp);
10f307d9 5512
971ba038 5513 if (CONST_INT_P (size)
35f44ac1 5514 && INTVAL (size) < TREE_STRING_LENGTH (exp))
a35a63ff 5515 emit_block_move (target, temp, size,
c0f85e83 5516 (call_param_p
a35a63ff 5517 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 5518 else
10f307d9 5519 {
3754d046 5520 machine_mode pointer_mode
98155838 5521 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
3754d046 5522 machine_mode address_mode = get_address_mode (target);
98155838 5523
35f44ac1 5524 /* Compute the size of the data to copy from the string. */
5525 tree copy_size
389dd41b 5526 = size_binop_loc (loc, MIN_EXPR,
5527 make_tree (sizetype, size),
5528 size_int (TREE_STRING_LENGTH (exp)));
a35a63ff 5529 rtx copy_size_rtx
5530 = expand_expr (copy_size, NULL_RTX, VOIDmode,
c0f85e83 5531 (call_param_p
a35a63ff 5532 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
1d277a67 5533 rtx_code_label *label = 0;
35f44ac1 5534
5535 /* Copy that much. */
98155838 5536 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
78a8ed03 5537 TYPE_UNSIGNED (sizetype));
a35a63ff 5538 emit_block_move (target, temp, copy_size_rtx,
c0f85e83 5539 (call_param_p
a35a63ff 5540 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 5541
ed8d3eee 5542 /* Figure out how much is left in TARGET that we have to clear.
98155838 5543 Do all calculations in pointer_mode. */
971ba038 5544 if (CONST_INT_P (copy_size_rtx))
35f44ac1 5545 {
29c05e22 5546 size = plus_constant (address_mode, size,
5547 -INTVAL (copy_size_rtx));
18279aee 5548 target = adjust_address (target, BLKmode,
5549 INTVAL (copy_size_rtx));
35f44ac1 5550 }
5551 else
5552 {
4a836698 5553 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
b572011e 5554 copy_size_rtx, NULL_RTX, 0,
5555 OPTAB_LIB_WIDEN);
35f44ac1 5556
98155838 5557 if (GET_MODE (copy_size_rtx) != address_mode)
5558 copy_size_rtx = convert_to_mode (address_mode,
5559 copy_size_rtx,
78a8ed03 5560 TYPE_UNSIGNED (sizetype));
18279aee 5561
5562 target = offset_address (target, copy_size_rtx,
5563 highest_pow2_factor (copy_size));
35f44ac1 5564 label = gen_label_rtx ();
5a894bc6 5565 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
2b96c5f6 5566 GET_MODE (size), 0, label);
35f44ac1 5567 }
5568
5569 if (size != const0_rtx)
0b25db21 5570 clear_storage (target, size, BLOCK_OP_NORMAL);
bdf60b71 5571
35f44ac1 5572 if (label)
5573 emit_label (label);
10f307d9 5574 }
5575 }
ce739127 5576 /* Handle calls that return values in multiple non-contiguous locations.
5577 The Irix 6 ABI has examples of this. */
5578 else if (GET_CODE (target) == PARALLEL)
2d0fd66d 5579 {
5580 if (GET_CODE (temp) == PARALLEL)
5581 emit_group_move (target, temp);
5582 else
5583 emit_group_load (target, temp, TREE_TYPE (exp),
5584 int_size_in_bytes (TREE_TYPE (exp)));
5585 }
5586 else if (GET_CODE (temp) == PARALLEL)
5587 emit_group_store (target, temp, TREE_TYPE (exp),
5588 int_size_in_bytes (TREE_TYPE (exp)));
10f307d9 5589 else if (GET_MODE (temp) == BLKmode)
a35a63ff 5590 emit_block_move (target, temp, expr_size (exp),
c0f85e83 5591 (call_param_p
a35a63ff 5592 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
2d0fd66d 5593 /* If we emit a nontemporal store, there is nothing else to do. */
5594 else if (nontemporal && emit_storent_insn (target, temp))
5b5037b3 5595 ;
10f307d9 5596 else
828eae76 5597 {
5598 temp = force_operand (temp, target);
5599 if (temp != target)
5600 emit_move_insn (target, temp);
5601 }
10f307d9 5602 }
9282409c 5603
3f2a8027 5604 return NULL_RTX;
10f307d9 5605}
058a1b7a 5606
5607/* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5608rtx
5609store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5610{
5611 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5612}
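
/* Editorial illustration (not part of expr.c): the STRING_CST fast path in
   store_expr_with_bounds above applies to array initializations such as

       char buf[64] = "abcd";

   By my reading of that code, exp_len is 64 and str_copy_len starts at
   TREE_STRING_LENGTH, i.e. 5 including the terminating NUL; when
   STORE_MAX_PIECES is a power of two the length is rounded up to a multiple
   of it, the leading bytes are emitted with store_by_pieces, and the
   remaining bytes of BUF are cleared with clear_storage.  The concrete
   numbers depend on the target's STORE_MAX_PIECES, so treat them as an
   example only.  */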
10f307d9 5613\f
927b65fb 5614/* Return true if field F of structure TYPE is a flexible array. */
5615
5616static bool
5617flexible_array_member_p (const_tree f, const_tree type)
5618{
5619 const_tree tf;
5620
5621 tf = TREE_TYPE (f);
5622 return (DECL_CHAIN (f) == NULL
5623 && TREE_CODE (tf) == ARRAY_TYPE
5624 && TYPE_DOMAIN (tf)
5625 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5626 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5627 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5628 && int_size_in_bytes (type) >= 0);
5629}
5630
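/* Editorial illustration (not part of expr.c): the kind of field the
   predicate above matches is a C99 flexible array member, e.g.

       struct msg { int len; char data[]; };

   DATA is the last field, has ARRAY_TYPE with a zero lower bound and no
   upper bound, and the enclosing struct still has a known size, so the
   predicate returns true and count_type_elements does not expect a
   constructor to initialize it.  */
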
5631/* If FOR_CTOR_P, return the number of top-level elements that a constructor
5632 must have in order for it to completely initialize a value of type TYPE.
5633 Return -1 if the number isn't known.
5634
5635 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5636
5637static HOST_WIDE_INT
5638count_type_elements (const_tree type, bool for_ctor_p)
5639{
5640 switch (TREE_CODE (type))
5641 {
5642 case ARRAY_TYPE:
5643 {
5644 tree nelts;
5645
5646 nelts = array_type_nelts (type);
e913b5cd 5647 if (nelts && tree_fits_uhwi_p (nelts))
927b65fb 5648 {
5649 unsigned HOST_WIDE_INT n;
5650
e913b5cd 5651 n = tree_to_uhwi (nelts) + 1;
927b65fb 5652 if (n == 0 || for_ctor_p)
5653 return n;
5654 else
5655 return n * count_type_elements (TREE_TYPE (type), false);
5656 }
5657 return for_ctor_p ? -1 : 1;
5658 }
5659
5660 case RECORD_TYPE:
5661 {
5662 unsigned HOST_WIDE_INT n;
5663 tree f;
5664
5665 n = 0;
5666 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5667 if (TREE_CODE (f) == FIELD_DECL)
5668 {
5669 if (!for_ctor_p)
5670 n += count_type_elements (TREE_TYPE (f), false);
5671 else if (!flexible_array_member_p (f, type))
5672 /* Don't count flexible arrays, which are not supposed
5673 to be initialized. */
5674 n += 1;
5675 }
5676
5677 return n;
5678 }
5679
5680 case UNION_TYPE:
5681 case QUAL_UNION_TYPE:
5682 {
5683 tree f;
5684 HOST_WIDE_INT n, m;
5685
5686 gcc_assert (!for_ctor_p);
5687 /* Estimate the number of scalars in each field and pick the
5688 maximum. Other estimates would do instead; the idea is simply
5689 to make sure that the estimate is not sensitive to the ordering
5690 of the fields. */
5691 n = 1;
5692 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5693 if (TREE_CODE (f) == FIELD_DECL)
5694 {
5695 m = count_type_elements (TREE_TYPE (f), false);
5696 /* If the field doesn't span the whole union, add an extra
5697 scalar for the rest. */
5698 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5699 TYPE_SIZE (type)) != 1)
5700 m++;
5701 if (n < m)
5702 n = m;
5703 }
5704 return n;
5705 }
5706
5707 case COMPLEX_TYPE:
5708 return 2;
5709
5710 case VECTOR_TYPE:
5711 return TYPE_VECTOR_SUBPARTS (type);
5712
5713 case INTEGER_TYPE:
5714 case REAL_TYPE:
5715 case FIXED_POINT_TYPE:
5716 case ENUMERAL_TYPE:
5717 case BOOLEAN_TYPE:
5718 case POINTER_TYPE:
5719 case OFFSET_TYPE:
5720 case REFERENCE_TYPE:
d965946e 5721 case NULLPTR_TYPE:
927b65fb 5722 return 1;
5723
5724 case ERROR_MARK:
5725 return 0;
5726
5727 case VOID_TYPE:
5728 case METHOD_TYPE:
5729 case FUNCTION_TYPE:
5730 case LANG_TYPE:
5731 default:
5732 gcc_unreachable ();
5733 }
5734}
5735
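/* Editorial illustration (not part of expr.c): a quick example of the two
   counting modes of count_type_elements above.  For

       struct point { int x, y; };
       struct box   { struct point lo, hi; };

   the estimate with !FOR_CTOR_P is 4 (four scalars in total), while with
   FOR_CTOR_P it is 2, since a complete constructor for struct box needs two
   top-level elements.  The names point and box are made up for the
   example.  */
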
20169a64 5736/* Helper for categorize_ctor_elements. Identical interface. */
dbd14dc5 5737
20169a64 5738static bool
b7bf20db 5739categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
927b65fb 5740 HOST_WIDE_INT *p_init_elts, bool *p_complete)
dbd14dc5 5741{
c75b4594 5742 unsigned HOST_WIDE_INT idx;
927b65fb 5743 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5744 tree value, purpose, elt_type;
dbd14dc5 5745
20169a64 5746 /* Whether CTOR is a valid constant initializer, in accordance with what
5747 initializer_constant_valid_p does. If inferred from the constructor
5748 elements, true until proven otherwise. */
5749 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5750 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5751
4ee9c684 5752 nz_elts = 0;
927b65fb 5753 init_elts = 0;
5754 num_fields = 0;
5755 elt_type = NULL_TREE;
491e04ef 5756
c75b4594 5757 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
dbd14dc5 5758 {
30d12889 5759 HOST_WIDE_INT mult = 1;
dbd14dc5 5760
0ff8139c 5761 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
4ee9c684 5762 {
5763 tree lo_index = TREE_OPERAND (purpose, 0);
5764 tree hi_index = TREE_OPERAND (purpose, 1);
dbd14dc5 5765
e913b5cd 5766 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5767 mult = (tree_to_uhwi (hi_index)
5768 - tree_to_uhwi (lo_index) + 1);
4ee9c684 5769 }
927b65fb 5770 num_fields += mult;
5771 elt_type = TREE_TYPE (value);
dbd14dc5 5772
4ee9c684 5773 switch (TREE_CODE (value))
5774 {
5775 case CONSTRUCTOR:
5776 {
20169a64 5777 HOST_WIDE_INT nz = 0, ic = 0;
1f8b6002 5778
927b65fb 5779 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5780 p_complete);
20169a64 5781
4ee9c684 5782 nz_elts += mult * nz;
927b65fb 5783 init_elts += mult * ic;
20169a64 5784
5785 if (const_from_elts_p && const_p)
5786 const_p = const_elt_p;
4ee9c684 5787 }
5788 break;
dbd14dc5 5789
4ee9c684 5790 case INTEGER_CST:
5791 case REAL_CST:
68a556d6 5792 case FIXED_CST:
4ee9c684 5793 if (!initializer_zerop (value))
5794 nz_elts += mult;
927b65fb 5795 init_elts += mult;
4ee9c684 5796 break;
839db04c 5797
5798 case STRING_CST:
5799 nz_elts += mult * TREE_STRING_LENGTH (value);
927b65fb 5800 init_elts += mult * TREE_STRING_LENGTH (value);
839db04c 5801 break;
5802
4ee9c684 5803 case COMPLEX_CST:
5804 if (!initializer_zerop (TREE_REALPART (value)))
5805 nz_elts += mult;
5806 if (!initializer_zerop (TREE_IMAGPART (value)))
5807 nz_elts += mult;
927b65fb 5808 init_elts += mult;
4ee9c684 5809 break;
839db04c 5810
4ee9c684 5811 case VECTOR_CST:
5812 {
fadf62f4 5813 unsigned i;
5814 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
42b74698 5815 {
fadf62f4 5816 tree v = VECTOR_CST_ELT (value, i);
5817 if (!initializer_zerop (v))
42b74698 5818 nz_elts += mult;
927b65fb 5819 init_elts += mult;
42b74698 5820 }
4ee9c684 5821 }
5822 break;
886cfd4f 5823
4ee9c684 5824 default:
30d12889 5825 {
927b65fb 5826 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
30d12889 5827 nz_elts += mult * tc;
927b65fb 5828 init_elts += mult * tc;
20169a64 5829
30d12889 5830 if (const_from_elts_p && const_p)
927b65fb 5831 const_p = initializer_constant_valid_p (value, elt_type)
30d12889 5832 != NULL_TREE;
5833 }
4ee9c684 5834 break;
5835 }
5836 }
886cfd4f 5837
927b65fb 5838 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5839 num_fields, elt_type))
5840 *p_complete = false;
7cb4a4d0 5841
4ee9c684 5842 *p_nz_elts += nz_elts;
927b65fb 5843 *p_init_elts += init_elts;
20169a64 5844
5845 return const_p;
4ee9c684 5846}
5847
20169a64 5848/* Examine CTOR to discover:
5849 * how many scalar fields are set to nonzero values,
5850 and place it in *P_NZ_ELTS;
5851 * how many scalar fields in total are in CTOR,
5852 and place it in *P_INIT_ELTS;
927b65fb 5853 * whether the constructor is complete -- in the sense that every
5854 meaningful byte is explicitly given a value --
5855 and place it in *P_COMPLETE.
20169a64 5856
5857 Return whether or not CTOR is a valid static constant initializer, the same
5858 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5859
5860bool
b7bf20db 5861categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
927b65fb 5862 HOST_WIDE_INT *p_init_elts, bool *p_complete)
4ee9c684 5863{
5864 *p_nz_elts = 0;
927b65fb 5865 *p_init_elts = 0;
5866 *p_complete = true;
20169a64 5867
927b65fb 5868 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
4ee9c684 5869}
5870
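/* Editorial illustration (not part of expr.c): for an initializer such as

       struct { int x, y, z; } v = { 0, 5, 0 };

   the walk above should find nz_elts == 1 (only Y is nonzero), init_elts == 3
   and *P_COMPLETE true, because the three elements cover every field.  This
   is an inference from the code, offered only as a worked example.  */
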
927b65fb 5871/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5872 of which had type LAST_TYPE. Each element was itself a complete
5873 initializer, in the sense that every meaningful byte was explicitly
5874 given a value. Return true if the same is true for the constructor
5875 as a whole. */
4ee9c684 5876
927b65fb 5877bool
5878complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5879 const_tree last_type)
4ee9c684 5880{
927b65fb 5881 if (TREE_CODE (type) == UNION_TYPE
5882 || TREE_CODE (type) == QUAL_UNION_TYPE)
4ee9c684 5883 {
927b65fb 5884 if (num_elts == 0)
5885 return false;
fa56dc1d 5886
927b65fb 5887 gcc_assert (num_elts == 1 && last_type);
026a11f4 5888
927b65fb 5889 /* ??? We could look at each element of the union, and find the
5890 largest element, which would avoid comparing the size of the
5891 initialized element against any tail padding in the union.
5892 Doesn't seem worth the effort... */
5893 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
dbd14dc5 5894 }
927b65fb 5895
5896 return count_type_elements (type, true) == num_elts;
dbd14dc5 5897}
5898
5899/* Return 1 if EXP contains mostly (3/4) zeros. */
5900
a9adb06f 5901static int
1f1872fd 5902mostly_zeros_p (const_tree exp)
dbd14dc5 5903{
dbd14dc5 5904 if (TREE_CODE (exp) == CONSTRUCTOR)
5905 {
927b65fb 5906 HOST_WIDE_INT nz_elts, init_elts;
5907 bool complete_p;
4ee9c684 5908
927b65fb 5909 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5910 return !complete_p || nz_elts < init_elts / 4;
dbd14dc5 5911 }
5912
4ee9c684 5913 return initializer_zerop (exp);
dbd14dc5 5914}
c69ad7b2 5915
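/* Editorial illustration (not part of expr.c): with the 3/4 rule above, an
   initializer like

       int a[8] = { 1 };

   is "mostly zeros": its constructor provides only one of the eight
   elements, so it is not complete and the whole array is cleared first,
   after which just the single nonzero element is stored.  Whether the
   clearing uses a block clear or element stores is decided by
   store_constructor below.  */
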
5916/* Return 1 if EXP contains all zeros. */
5917
5918static int
1f1872fd 5919all_zeros_p (const_tree exp)
c69ad7b2 5920{
5921 if (TREE_CODE (exp) == CONSTRUCTOR)
c69ad7b2 5922 {
927b65fb 5923 HOST_WIDE_INT nz_elts, init_elts;
5924 bool complete_p;
c69ad7b2 5925
927b65fb 5926 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
96d4b8c5 5927 return nz_elts == 0;
c69ad7b2 5928 }
5929
5930 return initializer_zerop (exp);
5931}
dbd14dc5 5932\f
e7ef3ff2 5933/* Helper function for store_constructor.
5934 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
2c269e73 5935 CLEARED is as for store_constructor.
1179a68b 5936 ALIAS_SET is the alias set to use for any stores.
a5b7fc8b 5937
5938 This provides a recursive shortcut back to store_constructor when it isn't
5939 necessary to go through store_field. This is so that we can pass through
5940 the cleared field to let store_constructor know that we may not have to
5941 clear a substructure if the outer structure has already been cleared. */
e7ef3ff2 5942
5943static void
35cb5232 5944store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
3754d046 5945 HOST_WIDE_INT bitpos, machine_mode mode,
f955ca51 5946 tree exp, int cleared, alias_set_type alias_set)
e7ef3ff2 5947{
5948 if (TREE_CODE (exp) == CONSTRUCTOR
a6645eae 5949 /* We can only call store_constructor recursively if the size and
5950 bit position are on a byte boundary. */
a5b7fc8b 5951 && bitpos % BITS_PER_UNIT == 0
a6645eae 5952 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6ef828f9 5953 /* If we have a nonzero bitpos for a register target, then we just
a5b7fc8b 5954 let store_field do the bitfield handling. This is unlikely to
5955 generate unnecessary clear instructions anyways. */
e16ceb8e 5956 && (bitpos == 0 || MEM_P (target)))
e7ef3ff2 5957 {
e16ceb8e 5958 if (MEM_P (target))
459b8611 5959 target
5960 = adjust_address (target,
5961 GET_MODE (target) == BLKmode
5962 || 0 != (bitpos
5963 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5964 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
1179a68b 5965
5b90bb08 5966
2c269e73 5967 /* Update the alias set, if required. */
e16ceb8e 5968 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5cc193e7 5969 && MEM_ALIAS_SET (target) != 0)
86ce88aa 5970 {
5971 target = copy_rtx (target);
5972 set_mem_alias_set (target, alias_set);
5973 }
5b90bb08 5974
e792f237 5975 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e7ef3ff2 5976 }
5977 else
f955ca51 5978 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
e7ef3ff2 5979}
5980
0e80b01d 5981
5982/* Returns the number of FIELD_DECLs in TYPE. */
5983
5984static int
5985fields_length (const_tree type)
5986{
5987 tree t = TYPE_FIELDS (type);
5988 int count = 0;
5989
5990 for (; t; t = DECL_CHAIN (t))
5991 if (TREE_CODE (t) == FIELD_DECL)
5992 ++count;
5993
5994 return count;
5995}
5996
5997
10f307d9 5998/* Store the value of constructor EXP into the rtx TARGET.
2c269e73 5999 TARGET is either a REG or a MEM; we know it cannot conflict, since
6000 safe_from_p has been called.
e792f237 6001 CLEARED is true if TARGET is known to have been zero'd.
6002 SIZE is the number of bytes of TARGET we are allowed to modify: this
a316ea6a 6003 may not be the same as the size of EXP if we are assigning to a field
6004 which has been packed to exclude padding bits. */
10f307d9 6005
6006static void
35cb5232 6007store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
10f307d9 6008{
2ef1e405 6009 tree type = TREE_TYPE (exp);
0bf16c4a 6010#ifdef WORD_REGISTER_OPERATIONS
3a6656ad 6011 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
0bf16c4a 6012#endif
2ef1e405 6013
611234b4 6014 switch (TREE_CODE (type))
10f307d9 6015 {
611234b4 6016 case RECORD_TYPE:
6017 case UNION_TYPE:
6018 case QUAL_UNION_TYPE:
6019 {
c75b4594 6020 unsigned HOST_WIDE_INT idx;
6021 tree field, value;
dbd14dc5 6022
611234b4 6023 /* If size is zero or the target is already cleared, do nothing. */
6024 if (size == 0 || cleared)
dbd14dc5 6025 cleared = 1;
611234b4 6026 /* We either clear the aggregate or indicate the value is dead. */
6027 else if ((TREE_CODE (type) == UNION_TYPE
6028 || TREE_CODE (type) == QUAL_UNION_TYPE)
6029 && ! CONSTRUCTOR_ELTS (exp))
6030 /* If the constructor is empty, clear the union. */
6031 {
0b25db21 6032 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
611234b4 6033 cleared = 1;
6034 }
10f307d9 6035
611234b4 6036 /* If we are building a static constructor into a register,
6037 set the initial value as zero so we can fold the value into
6038 a constant. But if more than one register is involved,
6039 this probably loses. */
6040 else if (REG_P (target) && TREE_STATIC (exp)
6041 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6042 {
6043 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6044 cleared = 1;
6045 }
fa56dc1d 6046
611234b4 6047 /* If the constructor has fewer fields than the structure or
6048 if we are initializing the structure to mostly zeros, clear
6049 the whole structure first. Don't do this if TARGET is a
6050 register whose mode size isn't equal to SIZE since
6051 clear_storage can't handle this case. */
6052 else if (size > 0
f1f41a6c 6053 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
611234b4 6054 != fields_length (type))
6055 || mostly_zeros_p (exp))
6056 && (!REG_P (target)
6057 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6058 == size)))
6059 {
0b25db21 6060 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6061 cleared = 1;
6062 }
c869557a 6063
e8d1dcf2 6064 if (REG_P (target) && !cleared)
18b42941 6065 emit_clobber (target);
10f307d9 6066
611234b4 6067 /* Store each element of the constructor into the
6068 corresponding field of TARGET. */
c75b4594 6069 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
611234b4 6070 {
3754d046 6071 machine_mode mode;
611234b4 6072 HOST_WIDE_INT bitsize;
6073 HOST_WIDE_INT bitpos = 0;
6074 tree offset;
6075 rtx to_rtx = target;
1f8b6002 6076
611234b4 6077 /* Just ignore missing fields. We cleared the whole
6078 structure, above, if any fields are missing. */
6079 if (field == 0)
6080 continue;
1f8b6002 6081
611234b4 6082 if (cleared && initializer_zerop (value))
6083 continue;
1f8b6002 6084
e913b5cd 6085 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6086 bitsize = tree_to_uhwi (DECL_SIZE (field));
611234b4 6087 else
6088 bitsize = -1;
1f8b6002 6089
611234b4 6090 mode = DECL_MODE (field);
6091 if (DECL_BIT_FIELD (field))
6092 mode = VOIDmode;
1f8b6002 6093
611234b4 6094 offset = DECL_FIELD_OFFSET (field);
e913b5cd 6095 if (tree_fits_shwi_p (offset)
6096 && tree_fits_shwi_p (bit_position (field)))
611234b4 6097 {
6098 bitpos = int_bit_position (field);
6099 offset = 0;
6100 }
6101 else
e913b5cd 6102 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
1f8b6002 6103
611234b4 6104 if (offset)
6105 {
3754d046 6106 machine_mode address_mode;
611234b4 6107 rtx offset_rtx;
1f8b6002 6108
611234b4 6109 offset
6110 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6111 make_tree (TREE_TYPE (exp),
6112 target));
6113
8ec3c5c2 6114 offset_rtx = expand_normal (offset);
611234b4 6115 gcc_assert (MEM_P (to_rtx));
1f8b6002 6116
87cf5753 6117 address_mode = get_address_mode (to_rtx);
98155838 6118 if (GET_MODE (offset_rtx) != address_mode)
6119 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
33ef2f52 6120
611234b4 6121 to_rtx = offset_address (to_rtx, offset_rtx,
6122 highest_pow2_factor (offset));
6123 }
7014838c 6124
e6860d27 6125#ifdef WORD_REGISTER_OPERATIONS
611234b4 6126 /* If this initializes a field that is smaller than a
6127 word, at the start of a word, try to widen it to a full
6128 word. This special case allows us to output C++ member
6129 function initializations in a form that the optimizers
6130 can understand. */
6131 if (REG_P (target)
6132 && bitsize < BITS_PER_WORD
6133 && bitpos % BITS_PER_WORD == 0
6134 && GET_MODE_CLASS (mode) == MODE_INT
6135 && TREE_CODE (value) == INTEGER_CST
6136 && exp_size >= 0
6137 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6138 {
6139 tree type = TREE_TYPE (value);
1f8b6002 6140
611234b4 6141 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6142 {
1b3c3119 6143 type = lang_hooks.types.type_for_mode
6144 (word_mode, TYPE_UNSIGNED (type));
e3b560a6 6145 value = fold_convert (type, value);
611234b4 6146 }
1f8b6002 6147
611234b4 6148 if (BYTES_BIG_ENDIAN)
6149 value
faa43f85 6150 = fold_build2 (LSHIFT_EXPR, type, value,
e3b560a6 6151 build_int_cst (type,
faa43f85 6152 BITS_PER_WORD - bitsize));
611234b4 6153 bitsize = BITS_PER_WORD;
6154 mode = word_mode;
6155 }
e6860d27 6156#endif
5cc193e7 6157
611234b4 6158 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6159 && DECL_NONADDRESSABLE_P (field))
6160 {
6161 to_rtx = copy_rtx (to_rtx);
6162 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6163 }
1f8b6002 6164
611234b4 6165 store_constructor_field (to_rtx, bitsize, bitpos, mode,
f955ca51 6166 value, cleared,
611234b4 6167 get_alias_set (TREE_TYPE (field)));
6168 }
6169 break;
6170 }
6171 case ARRAY_TYPE:
6172 {
c75b4594 6173 tree value, index;
6174 unsigned HOST_WIDE_INT i;
611234b4 6175 int need_to_clear;
6176 tree domain;
6177 tree elttype = TREE_TYPE (type);
6178 int const_bounds_p;
6179 HOST_WIDE_INT minelt = 0;
6180 HOST_WIDE_INT maxelt = 0;
6181
6182 domain = TYPE_DOMAIN (type);
6183 const_bounds_p = (TYPE_MIN_VALUE (domain)
6184 && TYPE_MAX_VALUE (domain)
e913b5cd 6185 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6186 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
611234b4 6187
6188 /* If we have constant bounds for the range of the type, get them. */
6189 if (const_bounds_p)
6190 {
e913b5cd 6191 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6192 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
611234b4 6193 }
4418a1d4 6194
611234b4 6195 /* If the constructor has fewer elements than the array, clear
6196 the whole array first. Similarly if this is static
6197 constructor of a non-BLKmode object. */
6198 if (cleared)
6199 need_to_clear = 0;
6200 else if (REG_P (target) && TREE_STATIC (exp))
6201 need_to_clear = 1;
6202 else
6203 {
c75b4594 6204 unsigned HOST_WIDE_INT idx;
6205 tree index, value;
611234b4 6206 HOST_WIDE_INT count = 0, zero_count = 0;
6207 need_to_clear = ! const_bounds_p;
1f8b6002 6208
611234b4 6209 /* This loop is a more accurate version of the loop in
6210 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6211 is also needed to check for missing elements. */
c75b4594 6212 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
611234b4 6213 {
611234b4 6214 HOST_WIDE_INT this_node_count;
c75b4594 6215
6216 if (need_to_clear)
6217 break;
1f8b6002 6218
611234b4 6219 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6220 {
6221 tree lo_index = TREE_OPERAND (index, 0);
6222 tree hi_index = TREE_OPERAND (index, 1);
1f8b6002 6223
e913b5cd 6224 if (! tree_fits_uhwi_p (lo_index)
6225 || ! tree_fits_uhwi_p (hi_index))
611234b4 6226 {
6227 need_to_clear = 1;
6228 break;
6229 }
1f8b6002 6230
e913b5cd 6231 this_node_count = (tree_to_uhwi (hi_index)
6232 - tree_to_uhwi (lo_index) + 1);
611234b4 6233 }
6234 else
6235 this_node_count = 1;
1f8b6002 6236
611234b4 6237 count += this_node_count;
c75b4594 6238 if (mostly_zeros_p (value))
611234b4 6239 zero_count += this_node_count;
6240 }
1f8b6002 6241
611234b4 6242 /* Clear the entire array first if there are any missing
6243 elements, or if the incidence of zero elements is >=
6244 75%. */
6245 if (! need_to_clear
6246 && (count < maxelt - minelt + 1
6247 || 4 * zero_count >= 3 * count))
6248 need_to_clear = 1;
6249 }
1f8b6002 6250
611234b4 6251 if (need_to_clear && size > 0)
6252 {
6253 if (REG_P (target))
6254 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6255 else
0b25db21 6256 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6257 cleared = 1;
6258 }
4418a1d4 6259
611234b4 6260 if (!cleared && REG_P (target))
6261 /* Inform later passes that the old value is dead. */
18b42941 6262 emit_clobber (target);
4418a1d4 6263
611234b4 6264 /* Store each element of the constructor into the
6265 corresponding element of TARGET, determined by counting the
6266 elements. */
c75b4594 6267 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
611234b4 6268 {
3754d046 6269 machine_mode mode;
611234b4 6270 HOST_WIDE_INT bitsize;
6271 HOST_WIDE_INT bitpos;
611234b4 6272 rtx xtarget = target;
1f8b6002 6273
611234b4 6274 if (cleared && initializer_zerop (value))
6275 continue;
1f8b6002 6276
611234b4 6277 mode = TYPE_MODE (elttype);
6278 if (mode == BLKmode)
e913b5cd 6279 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6280 ? tree_to_uhwi (TYPE_SIZE (elttype))
611234b4 6281 : -1);
6282 else
6283 bitsize = GET_MODE_BITSIZE (mode);
1f8b6002 6284
611234b4 6285 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6286 {
6287 tree lo_index = TREE_OPERAND (index, 0);
6288 tree hi_index = TREE_OPERAND (index, 1);
6289 rtx index_r, pos_rtx;
6290 HOST_WIDE_INT lo, hi, count;
6291 tree position;
1f8b6002 6292
611234b4 6293 /* If the range is constant and "small", unroll the loop. */
6294 if (const_bounds_p
e913b5cd 6295 && tree_fits_shwi_p (lo_index)
6296 && tree_fits_shwi_p (hi_index)
6297 && (lo = tree_to_shwi (lo_index),
6298 hi = tree_to_shwi (hi_index),
611234b4 6299 count = hi - lo + 1,
6300 (!MEM_P (target)
6301 || count <= 2
e913b5cd 6302 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6303 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
611234b4 6304 <= 40 * 8)))))
6305 {
6306 lo -= minelt; hi -= minelt;
6307 for (; lo <= hi; lo++)
6308 {
e913b5cd 6309 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
1f8b6002 6310
611234b4 6311 if (MEM_P (target)
6312 && !MEM_KEEP_ALIAS_SET_P (target)
6313 && TREE_CODE (type) == ARRAY_TYPE
6314 && TYPE_NONALIASED_COMPONENT (type))
6315 {
6316 target = copy_rtx (target);
6317 MEM_KEEP_ALIAS_SET_P (target) = 1;
6318 }
1f8b6002 6319
611234b4 6320 store_constructor_field
f955ca51 6321 (target, bitsize, bitpos, mode, value, cleared,
611234b4 6322 get_alias_set (elttype));
6323 }
6324 }
6325 else
6326 {
1d277a67 6327 rtx_code_label *loop_start = gen_label_rtx ();
6328 rtx_code_label *loop_end = gen_label_rtx ();
611234b4 6329 tree exit_cond;
1f8b6002 6330
8ec3c5c2 6331 expand_normal (hi_index);
1f8b6002 6332
e60a6f7b 6333 index = build_decl (EXPR_LOCATION (exp),
6334 VAR_DECL, NULL_TREE, domain);
3b2411a8 6335 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
611234b4 6336 SET_DECL_RTL (index, index_r);
5b5037b3 6337 store_expr (lo_index, index_r, 0, false);
1f8b6002 6338
611234b4 6339 /* Build the head of the loop. */
6340 do_pending_stack_adjust ();
6341 emit_label (loop_start);
6342
6343 /* Assign value to element index. */
e3b560a6 6344 position =
6345 fold_convert (ssizetype,
6346 fold_build2 (MINUS_EXPR,
6347 TREE_TYPE (index),
6348 index,
6349 TYPE_MIN_VALUE (domain)));
6350
6351 position =
6352 size_binop (MULT_EXPR, position,
6353 fold_convert (ssizetype,
6354 TYPE_SIZE_UNIT (elttype)));
1f8b6002 6355
8ec3c5c2 6356 pos_rtx = expand_normal (position);
611234b4 6357 xtarget = offset_address (target, pos_rtx,
6358 highest_pow2_factor (position));
6359 xtarget = adjust_address (xtarget, mode, 0);
6360 if (TREE_CODE (value) == CONSTRUCTOR)
6361 store_constructor (value, xtarget, cleared,
6362 bitsize / BITS_PER_UNIT);
6363 else
5b5037b3 6364 store_expr (value, xtarget, 0, false);
611234b4 6365
6366 /* Generate a conditional jump to exit the loop. */
6367 exit_cond = build2 (LT_EXPR, integer_type_node,
6368 index, hi_index);
79ab74cc 6369 jumpif (exit_cond, loop_end, -1);
1f8b6002 6370
611234b4 6371 /* Update the loop counter, and jump to the head of
6372 the loop. */
6373 expand_assignment (index,
6374 build2 (PLUS_EXPR, TREE_TYPE (index),
5b5037b3 6375 index, integer_one_node),
6376 false);
1f8b6002 6377
611234b4 6378 emit_jump (loop_start);
1f8b6002 6379
611234b4 6380 /* Build the end of the loop. */
6381 emit_label (loop_end);
6382 }
6383 }
e913b5cd 6384 else if ((index != 0 && ! tree_fits_shwi_p (index))
6385 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
611234b4 6386 {
6387 tree position;
1f8b6002 6388
611234b4 6389 if (index == 0)
6390 index = ssize_int (1);
1f8b6002 6391
611234b4 6392 if (minelt)
6393 index = fold_convert (ssizetype,
faa43f85 6394 fold_build2 (MINUS_EXPR,
6395 TREE_TYPE (index),
6396 index,
6397 TYPE_MIN_VALUE (domain)));
1f8b6002 6398
e3b560a6 6399 position =
6400 size_binop (MULT_EXPR, index,
6401 fold_convert (ssizetype,
6402 TYPE_SIZE_UNIT (elttype)));
611234b4 6403 xtarget = offset_address (target,
8ec3c5c2 6404 expand_normal (position),
611234b4 6405 highest_pow2_factor (position));
6406 xtarget = adjust_address (xtarget, mode, 0);
5b5037b3 6407 store_expr (value, xtarget, 0, false);
611234b4 6408 }
6409 else
6410 {
6411 if (index != 0)
e913b5cd 6412 bitpos = ((tree_to_shwi (index) - minelt)
6413 * tree_to_uhwi (TYPE_SIZE (elttype)));
611234b4 6414 else
e913b5cd 6415 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
1f8b6002 6416
611234b4 6417 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6418 && TREE_CODE (type) == ARRAY_TYPE
6419 && TYPE_NONALIASED_COMPONENT (type))
6420 {
6421 target = copy_rtx (target);
6422 MEM_KEEP_ALIAS_SET_P (target) = 1;
6423 }
6424 store_constructor_field (target, bitsize, bitpos, mode, value,
f955ca51 6425 cleared, get_alias_set (elttype));
611234b4 6426 }
6427 }
6428 break;
6429 }
4418a1d4 6430
611234b4 6431 case VECTOR_TYPE:
6432 {
c75b4594 6433 unsigned HOST_WIDE_INT idx;
6434 constructor_elt *ce;
611234b4 6435 int i;
6436 int need_to_clear;
d386876e 6437 int icode = CODE_FOR_nothing;
611234b4 6438 tree elttype = TREE_TYPE (type);
e913b5cd 6439 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
3754d046 6440 machine_mode eltmode = TYPE_MODE (elttype);
611234b4 6441 HOST_WIDE_INT bitsize;
6442 HOST_WIDE_INT bitpos;
9c1b832c 6443 rtvec vector = NULL;
611234b4 6444 unsigned n_elts;
4eaf1e94 6445 alias_set_type alias;
1f8b6002 6446
611234b4 6447 gcc_assert (eltmode != BLKmode);
1f8b6002 6448
611234b4 6449 n_elts = TYPE_VECTOR_SUBPARTS (type);
6450 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6451 {
3754d046 6452 machine_mode mode = GET_MODE (target);
1f8b6002 6453
d6bf3b14 6454 icode = (int) optab_handler (vec_init_optab, mode);
57b5438a 6455 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6456 if (icode != CODE_FOR_nothing)
6457 {
6458 tree value;
6459
6460 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6461 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6462 {
6463 icode = CODE_FOR_nothing;
6464 break;
6465 }
6466 }
611234b4 6467 if (icode != CODE_FOR_nothing)
6468 {
6469 unsigned int i;
1f8b6002 6470
9c1b832c 6471 vector = rtvec_alloc (n_elts);
611234b4 6472 for (i = 0; i < n_elts; i++)
9c1b832c 6473 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
611234b4 6474 }
6475 }
1f8b6002 6476
611234b4 6477 /* If the constructor has fewer elements than the vector,
 6478	 clear the whole vector first.  Similarly if this is the static
6479 constructor of a non-BLKmode object. */
6480 if (cleared)
6481 need_to_clear = 0;
6482 else if (REG_P (target) && TREE_STATIC (exp))
6483 need_to_clear = 1;
6484 else
6485 {
6486 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
c75b4594 6487 tree value;
1f8b6002 6488
c75b4594 6489 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
611234b4 6490 {
e913b5cd 6491 int n_elts_here = tree_to_uhwi
611234b4 6492 (int_const_binop (TRUNC_DIV_EXPR,
c75b4594 6493 TYPE_SIZE (TREE_TYPE (value)),
e913b5cd 6494 TYPE_SIZE (elttype)));
1f8b6002 6495
611234b4 6496 count += n_elts_here;
c75b4594 6497 if (mostly_zeros_p (value))
611234b4 6498 zero_count += n_elts_here;
6499 }
4418a1d4 6500
611234b4 6501 /* Clear the entire vector first if there are any missing elements,
6502 or if the incidence of zero elements is >= 75%. */
6503 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6504 }
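	/* Illustrative note, not part of the original sources: with, say,
	   n_elts == 8, count == 8 and zero_count == 6, the test
	   4 * 6 >= 3 * 8 holds (zero elements make up at least 75% of the
	   vector), so the whole vector is cleared up front and only the
	   two nonzero elements are stored individually below.  */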
1f8b6002 6505
611234b4 6506 if (need_to_clear && size > 0 && !vector)
6507 {
6508 if (REG_P (target))
4eaf1e94 6509 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
611234b4 6510 else
0b25db21 6511 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6512 cleared = 1;
6513 }
1f8b6002 6514
49f312aa 6515 /* Inform later passes that the old value is dead. */
1abf6b04 6516 if (!cleared && !vector && REG_P (target))
49f312aa 6517 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
611234b4 6518
4eaf1e94 6519 if (MEM_P (target))
6520 alias = MEM_ALIAS_SET (target);
6521 else
6522 alias = get_alias_set (elttype);
6523
611234b4 6524 /* Store each element of the constructor into the corresponding
6525 element of TARGET, determined by counting the elements. */
c75b4594 6526 for (idx = 0, i = 0;
f1f41a6c 6527 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
c75b4594 6528 idx++, i += bitsize / elt_size)
611234b4 6529 {
611234b4 6530 HOST_WIDE_INT eltpos;
c75b4594 6531 tree value = ce->value;
1f8b6002 6532
e913b5cd 6533 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
611234b4 6534 if (cleared && initializer_zerop (value))
6535 continue;
1f8b6002 6536
c75b4594 6537 if (ce->index)
e913b5cd 6538 eltpos = tree_to_uhwi (ce->index);
611234b4 6539 else
6540 eltpos = i;
1f8b6002 6541
611234b4 6542 if (vector)
6543 {
57b5438a 6544 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6545 elements. */
611234b4 6546 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
9c1b832c 6547 RTVEC_ELT (vector, eltpos)
8ec3c5c2 6548 = expand_normal (value);
611234b4 6549 }
6550 else
6551 {
3754d046 6552 machine_mode value_mode =
611234b4 6553 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4418a1d4 6554 ? TYPE_MODE (TREE_TYPE (value))
6555 : eltmode;
611234b4 6556 bitpos = eltpos * elt_size;
f955ca51 6557 store_constructor_field (target, bitsize, bitpos, value_mode,
6558 value, cleared, alias);
611234b4 6559 }
6560 }
1f8b6002 6561
611234b4 6562 if (vector)
6563 emit_insn (GEN_FCN (icode)
6564 (target,
9c1b832c 6565 gen_rtx_PARALLEL (GET_MODE (target), vector)));
611234b4 6566 break;
6567 }
1f8b6002 6568
611234b4 6569 default:
6570 gcc_unreachable ();
97b2af42 6571 }
10f307d9 6572}
6573
6574/* Store the value of EXP (an expression tree)
6575 into a subfield of TARGET which has mode MODE and occupies
6576 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6577 If MODE is VOIDmode, it means that we are storing into a bit-field.
6578
4bb60ec7 6579	 BITREGION_START is the bit position of the first bit-field in this region.
 6580	 BITREGION_END is the bit position of the last bit-field in this region.
 6581	 These two fields are 0 if the C++ memory model does not apply,
 6582	 or if we are not interested in keeping track of bit-field regions.
6583
58f9138c 6584 Always return const0_rtx unless we have something particular to
6585 return.
10f307d9 6586
1e2513d9 6587 ALIAS_SET is the alias set for the destination. This value will
6588 (in general) be different from that for TARGET, since TARGET is a
5b5037b3 6589 reference to the containing structure.
48e1416a 6590
5b5037b3 6591 If NONTEMPORAL is true, try generating a nontemporal store. */
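/* Illustrative note, not part of the original sources: for an assignment
   such as s.f = x, where F is a 3-bit bit-field placed 5 bits into S,
   expand_assignment would typically call this function with BITSIZE == 3,
   BITPOS == 5 and MODE == VOIDmode, so the store is routed through the
   store_bit_field path below.  */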
10f307d9 6592
6593static rtx
35cb5232 6594store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
4bb60ec7 6595 unsigned HOST_WIDE_INT bitregion_start,
6596 unsigned HOST_WIDE_INT bitregion_end,
3754d046 6597 machine_mode mode, tree exp,
32c2fdea 6598 alias_set_type alias_set, bool nontemporal)
10f307d9 6599{
0dbd1c74 6600 if (TREE_CODE (exp) == ERROR_MARK)
6601 return const0_rtx;
6602
55e9836d 6603 /* If we have nothing to store, do nothing unless the expression has
6604 side-effects. */
6605 if (bitsize == 0)
1db6d067 6606 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
10f307d9 6607
efd3939c 6608 if (GET_CODE (target) == CONCAT)
6609 {
6610 /* We're storing into a struct containing a single __complex. */
6611
611234b4 6612 gcc_assert (!bitpos);
5b5037b3 6613 return store_expr (exp, target, 0, nontemporal);
efd3939c 6614 }
10f307d9 6615
6616 /* If the structure is in a register or if the component
6617 is a bit field, we cannot use addressing to access it.
6618 Use bit-field techniques or SUBREG to store in it. */
6619
07edfa02 6620 if (mode == VOIDmode
03519f22 6621 || (mode != BLKmode && ! direct_store[(int) mode]
6622 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
8ad4c111 6624 || REG_P (target)
66aa258b 6625 || GET_CODE (target) == SUBREG
4e05e574 6626 /* If the field isn't aligned enough to store as an ordinary memref,
6627 store it as a bit field. */
9a0db358 6628 || (mode != BLKmode
8f6f6bc8 6629 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6630 || bitpos % GET_MODE_ALIGNMENT (mode))
6631 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
35cb5232 6632 || (bitpos % BITS_PER_UNIT != 0)))
941a2396 6633 || (bitsize >= 0 && mode != BLKmode
6634 && GET_MODE_BITSIZE (mode) > bitsize)
155b05dc 6635 /* If the RHS and field are a constant size and the size of the
6636 RHS isn't the same size as the bitfield, we must use bitfield
6637 operations. */
a0c2c45b 6638 || (bitsize >= 0
6639 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
182cf5a9 6640 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6641 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6642 decl we must use bitfield operations. */
6643 || (bitsize >= 0
6644 && TREE_CODE (exp) == MEM_REF
6645 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6646 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6647 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6648 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
10f307d9 6649 {
24bdc387 6650 rtx temp;
c1a83279 6651 gimple nop_def;
24bdc387 6652
6653 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6654 implies a mask operation. If the precision is the same size as
6655 the field we're storing into, that mask is redundant. This is
6656 particularly common with bit field assignments generated by the
6657 C front end. */
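      /* Illustrative note, not part of the original sources: for
	 struct { unsigned c : 3; } s;  and an assignment  s.c = i,  the RHS
	 is a conversion of I to a 3-bit type whose mode is still QImode.
	 Only 3 bits are stored anyway, so the masking implied by that
	 conversion is redundant and I is expanded directly instead.  */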
c1a83279 6658 nop_def = get_def_for_expr (exp, NOP_EXPR);
6659 if (nop_def)
60fb4601 6660 {
6661 tree type = TREE_TYPE (exp);
6662 if (INTEGRAL_TYPE_P (type)
6663 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6664 && bitsize == TYPE_PRECISION (type))
6665 {
c1a83279 6666 tree op = gimple_assign_rhs1 (nop_def);
6667 type = TREE_TYPE (op);
60fb4601 6668 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
c1a83279 6669 exp = op;
60fb4601 6670 }
6671 }
24bdc387 6672
8ec3c5c2 6673 temp = expand_normal (exp);
97d7f645 6674
0aa5cbcc 6675 /* If BITSIZE is narrower than the size of the type of EXP
6676 we will be narrowing TEMP. Normally, what's wanted are the
6677 low-order bits. However, if EXP's type is a record and this is
 6678	 a big-endian machine, we want the upper BITSIZE bits.  */
6679 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
cce8da2f 6680 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
0aa5cbcc 6681 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6682 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
f5ff0b21 6683 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
8a348c93 6684 NULL_RTX, 1);
0aa5cbcc 6685
40715742 6686 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
97d7f645 6687 if (mode != VOIDmode && mode != BLKmode
6688 && mode != TYPE_MODE (TREE_TYPE (exp)))
6689 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6690
7081e928 6691 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6692 are both BLKmode, both must be in memory and BITPOS must be aligned
6693 on a byte boundary. If so, we simply do a block copy. Likewise for
6694 a BLKmode-like TARGET. */
6695 if (GET_CODE (temp) != PARALLEL
6696 && GET_MODE (temp) == BLKmode
3bfa8ada 6697 && (GET_MODE (target) == BLKmode
6698 || (MEM_P (target)
6699 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6700 && (bitpos % BITS_PER_UNIT) == 0
6701 && (bitsize % BITS_PER_UNIT) == 0)))
0e20f9fb 6702 {
611234b4 6703 gcc_assert (MEM_P (target) && MEM_P (temp)
3bfa8ada 6704 && (bitpos % BITS_PER_UNIT) == 0);
0e20f9fb 6705
e513d163 6706 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
0e20f9fb 6707 emit_block_move (target, temp,
2b96c5f6 6708 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
0378dbdc 6709 / BITS_PER_UNIT),
6710 BLOCK_OP_NORMAL);
0e20f9fb 6711
58f9138c 6712 return const0_rtx;
0e20f9fb 6713 }
6714
2d0fd66d 6715 /* Handle calls that return values in multiple non-contiguous locations.
6716 The Irix 6 ABI has examples of this. */
61c39547 6717 if (GET_CODE (temp) == PARALLEL)
6718 {
f955ca51 6719 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
61c39547 6720 rtx temp_target;
e0fb89d5 6721 if (mode == BLKmode || mode == VOIDmode)
f955ca51 6722 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6723 temp_target = gen_reg_rtx (mode);
6724 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
61c39547 6725 temp = temp_target;
6726 }
f955ca51 6727 else if (mode == BLKmode)
7e91b548 6728 {
f955ca51 6729 /* Handle calls that return BLKmode values in registers. */
6730 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6731 {
6732 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6733 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6734 temp = temp_target;
6735 }
6736 else
6737 {
6738 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6739 rtx temp_target;
6740 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6741 temp_target = gen_reg_rtx (mode);
6742 temp_target
6743 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
3f71db40 6744 temp_target, mode, mode);
f955ca51 6745 temp = temp_target;
6746 }
7e91b548 6747 }
6748
61c39547 6749 /* Store the value in the bitfield. */
6750 store_bit_field (target, bitsize, bitpos,
6751 bitregion_start, bitregion_end,
6752 mode, temp);
2b96c5f6 6753
10f307d9 6754 return const0_rtx;
6755 }
6756 else
6757 {
10f307d9 6758 /* Now build a reference to just the desired component. */
58f9138c 6759 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
2b96c5f6 6760
6761 if (to_rtx == target)
6762 to_rtx = copy_rtx (to_rtx);
537ffcfc 6763
5cc193e7 6764 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
2b96c5f6 6765 set_mem_alias_set (to_rtx, alias_set);
10f307d9 6766
5b5037b3 6767 return store_expr (exp, to_rtx, 0, nontemporal);
10f307d9 6768 }
6769}
6770\f
6771/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
ba04d9d5 6772 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6773 codes and find the ultimate containing object, which we return.
10f307d9 6774
6775 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6776 bit position, and *PUNSIGNEDP to the signedness of the field.
954bdcb1 6777 If the position of the field is variable, we store a tree
6778 giving the variable offset (in units) in *POFFSET.
6779 This offset is in addition to the bit position.
6780 If the position is not variable, we store 0 in *POFFSET.
10f307d9 6781
6782 If any of the extraction expressions is volatile,
6783 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6784
3bfa8ada 6785 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6786 Otherwise, it is a mode that can be used to access the field.
01ab6370 6787
6788 If the field describes a variable-sized object, *PMODE is set to
3bfa8ada 6789 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
e7e9416e 6790 this case, but the address of the object can be found.
6791
6792 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6793 look through nodes that serve as markers of a greater alignment than
6794 the one that can be deduced from the expression. These nodes make it
6795 possible for front-ends to prevent temporaries from being created by
6796 the middle-end on alignment considerations. For that purpose, the
6797 normal operating mode at high-level is to always pass FALSE so that
6798 the ultimate containing object is really returned; moreover, the
6799 associated predicate handled_component_p will always return TRUE
6800 on these nodes, thus indicating that they are essentially handled
6801 by get_inner_reference. TRUE should only be passed when the caller
6802 is scanning the expression in order to build another representation
6803 and specifically knows how to handle these nodes; as such, this is
6804 the normal operating mode in the RTL expanders. */
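/* Illustrative example, not part of the original sources: for a reference
   such as s.f, where F is declared as "int f : 3" and placed 5 bits into S,
   this returns the decl for S with *PBITSIZE == 3, *PBITPOS == 5,
   *POFFSET == NULL_TREE and, since F is a bit-field, *PMODE == VOIDmode.  */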
10f307d9 6805
6806tree
35cb5232 6807get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6808 HOST_WIDE_INT *pbitpos, tree *poffset,
3754d046 6809 machine_mode *pmode, int *punsignedp,
e7e9416e 6810 int *pvolatilep, bool keep_aligning)
10f307d9 6811{
6812 tree size_tree = 0;
3754d046 6813 machine_mode mode = VOIDmode;
3bfa8ada 6814 bool blkmode_bitfield = false;
902de8ed 6815 tree offset = size_zero_node;
5de9d3ed 6816 offset_int bit_offset = 0;
10f307d9 6817
02e7a332 6818 /* First get the mode, signedness, and size. We do this from just the
6819 outermost expression. */
b21392bb 6820 *pbitsize = -1;
10f307d9 6821 if (TREE_CODE (exp) == COMPONENT_REF)
6822 {
3bfa8ada 6823 tree field = TREE_OPERAND (exp, 1);
6824 size_tree = DECL_SIZE (field);
7691c4ce 6825 if (flag_strict_volatile_bitfields > 0
6826 && TREE_THIS_VOLATILE (exp)
6827 && DECL_BIT_FIELD_TYPE (field)
6828 && DECL_MODE (field) != BLKmode)
a420d927 6829 /* Volatile bitfields should be accessed in the mode of the
6830 field's type, not the mode computed based on the bit
6831 size. */
6832 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7691c4ce 6833 else if (!DECL_BIT_FIELD (field))
6834 mode = DECL_MODE (field);
6835 else if (DECL_MODE (field) == BLKmode)
6836 blkmode_bitfield = true;
3bfa8ada 6837
6838 *punsignedp = DECL_UNSIGNED (field);
10f307d9 6839 }
6840 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6841 {
6842 size_tree = TREE_OPERAND (exp, 1);
70337474 6843 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6844 || TYPE_UNSIGNED (TREE_TYPE (exp)));
1f8b6002 6845
8ea8de24 6846 /* For vector types, with the correct size of access, use the mode of
 6847	 the inner type.  */
6848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6849 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6850 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6851 mode = TYPE_MODE (TREE_TYPE (exp));
10f307d9 6852 }
6853 else
6854 {
6855 mode = TYPE_MODE (TREE_TYPE (exp));
78a8ed03 6856 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
02e7a332 6857
be2828ce 6858 if (mode == BLKmode)
6859 size_tree = TYPE_SIZE (TREE_TYPE (exp));
02e7a332 6860 else
6861 *pbitsize = GET_MODE_BITSIZE (mode);
10f307d9 6862 }
fa56dc1d 6863
02e7a332 6864 if (size_tree != 0)
10f307d9 6865 {
e913b5cd 6866 if (! tree_fits_uhwi_p (size_tree))
01ab6370 6867 mode = BLKmode, *pbitsize = -1;
6868 else
e913b5cd 6869 *pbitsize = tree_to_uhwi (size_tree);
10f307d9 6870 }
6871
6872 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6873 and find the ultimate containing object. */
10f307d9 6874 while (1)
6875 {
1f9b622b 6876 switch (TREE_CODE (exp))
10f307d9 6877 {
1f9b622b 6878 case BIT_FIELD_REF:
5de9d3ed 6879 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
1f9b622b 6880 break;
10f307d9 6881
1f9b622b 6882 case COMPONENT_REF:
6883 {
6884 tree field = TREE_OPERAND (exp, 1);
6885 tree this_offset = component_ref_field_offset (exp);
227bf826 6886
1f9b622b 6887 /* If this field hasn't been filled in yet, don't go past it.
6888 This should only happen when folding expressions made during
6889 type construction. */
6890 if (this_offset == 0)
6891 break;
75f7b24f 6892
1f9b622b 6893 offset = size_binop (PLUS_EXPR, offset, this_offset);
5de9d3ed 6894 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
7114c815 6895
1f9b622b 6896 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6897 }
6898 break;
7114c815 6899
1f9b622b 6900 case ARRAY_REF:
6901 case ARRAY_RANGE_REF:
6902 {
6903 tree index = TREE_OPERAND (exp, 1);
6904 tree low_bound = array_ref_low_bound (exp);
6905 tree unit_size = array_ref_element_size (exp);
6906
6907 /* We assume all arrays have sizes that are a multiple of a byte.
6908 First subtract the lower bound, if any, in the type of the
6909 index, then convert to sizetype and multiply by the size of
6910 the array element. */
6911 if (! integer_zerop (low_bound))
faa43f85 6912 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6913 index, low_bound);
1f9b622b 6914
6915 offset = size_binop (PLUS_EXPR, offset,
6916 size_binop (MULT_EXPR,
e3b560a6 6917 fold_convert (sizetype, index),
1f9b622b 6918 unit_size));
6919 }
6920 break;
6921
6922 case REALPART_EXPR:
1f9b622b 6923 break;
6924
6925 case IMAGPART_EXPR:
e913b5cd 6926 bit_offset += *pbitsize;
1f9b622b 6927 break;
6928
1f9b622b 6929 case VIEW_CONVERT_EXPR:
e7e9416e 6930 if (keep_aligning && STRICT_ALIGNMENT
6931 && (TYPE_ALIGN (TREE_TYPE (exp))
1f9b622b 6932 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
1f9b622b 6933 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6934 < BIGGEST_ALIGNMENT)
6935 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6936 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6937 goto done;
6938 break;
6939
182cf5a9 6940 case MEM_REF:
6941 /* Hand back the decl for MEM[&decl, off]. */
6942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6943 {
6944 tree off = TREE_OPERAND (exp, 1);
6945 if (!integer_zerop (off))
6946 {
5de9d3ed 6947 offset_int boff, coff = mem_ref_offset (exp);
885a2694 6948 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
cf8f0e63 6949 bit_offset += boff;
182cf5a9 6950 }
6951 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6952 }
6953 goto done;
6954
1f9b622b 6955 default:
6956 goto done;
6957 }
954bdcb1 6958
6959 /* If any reference in the chain is volatile, the effect is volatile. */
6960 if (TREE_THIS_VOLATILE (exp))
6961 *pvolatilep = 1;
7fce34be 6962
10f307d9 6963 exp = TREE_OPERAND (exp, 0);
6964 }
1f9b622b 6965 done:
10f307d9 6966
02e7a332 6967 /* If OFFSET is constant, see if we can return the whole thing as a
85a32bdb 6968 constant bit position. Make sure to handle overflow during
6969 this conversion. */
2ad5f5fc 6970 if (TREE_CODE (offset) == INTEGER_CST)
6971 {
5de9d3ed 6972 offset_int tem = wi::sext (wi::to_offset (offset),
6973 TYPE_PRECISION (sizetype));
885a2694 6974 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
cf8f0e63 6975 tem += bit_offset;
796b6678 6976 if (wi::fits_shwi_p (tem))
cf8f0e63 6977 {
6978 *pbitpos = tem.to_shwi ();
3bfa8ada 6979 *poffset = offset = NULL_TREE;
85a32bdb 6980 }
6981 }
6982
6983 /* Otherwise, split it up. */
3bfa8ada 6984 if (offset)
6985 {
476e59ce 6986 /* Avoid returning a negative bitpos as this may wreak havoc later. */
f7572df2 6987 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
476e59ce 6988 {
885a2694 6989 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
5de9d3ed 6990 offset_int tem = bit_offset.and_not (mask);
476e59ce 6991 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 6992	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
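	  /* Illustrative example, not part of the original sources: with
	     BITS_PER_UNIT == 8 and BIT_OFFSET == -5, TEM is -8, so
	     BIT_OFFSET becomes 3 and OFFSET is decreased by one byte.  */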
cf8f0e63 6993 bit_offset -= tem;
885a2694 6994 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
476e59ce 6995 offset = size_binop (PLUS_EXPR, offset,
e913b5cd 6996 wide_int_to_tree (sizetype, tem));
476e59ce 6997 }
6998
cf8f0e63 6999 *pbitpos = bit_offset.to_shwi ();
3bfa8ada 7000 *poffset = offset;
7001 }
7002
7003 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7004 if (mode == VOIDmode
7005 && blkmode_bitfield
7006 && (*pbitpos % BITS_PER_UNIT) == 0
7007 && (*pbitsize % BITS_PER_UNIT) == 0)
7008 *pmode = BLKmode;
7009 else
7010 *pmode = mode;
c869557a 7011
10f307d9 7012 return exp;
7013}
eb4b06b6 7014
6cbeacbb 7015/* Alignment in bits the TARGET of an assignment may be assumed to have. */
7016
7017static unsigned HOST_WIDE_INT
7018target_align (const_tree target)
7019{
7020 /* We might have a chain of nested references with intermediate misaligning
 7021	 bit-field components, so we need to recurse to find out.  */
7022
7023 unsigned HOST_WIDE_INT this_align, outer_align;
7024
7025 switch (TREE_CODE (target))
7026 {
7027 case BIT_FIELD_REF:
7028 return 1;
7029
7030 case COMPONENT_REF:
7031 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7032 outer_align = target_align (TREE_OPERAND (target, 0));
7033 return MIN (this_align, outer_align);
7034
7035 case ARRAY_REF:
7036 case ARRAY_RANGE_REF:
7037 this_align = TYPE_ALIGN (TREE_TYPE (target));
7038 outer_align = target_align (TREE_OPERAND (target, 0));
7039 return MIN (this_align, outer_align);
7040
7041 CASE_CONVERT:
7042 case NON_LVALUE_EXPR:
7043 case VIEW_CONVERT_EXPR:
7044 this_align = TYPE_ALIGN (TREE_TYPE (target));
7045 outer_align = target_align (TREE_OPERAND (target, 0));
7046 return MAX (this_align, outer_align);
7047
7048 default:
7049 return TYPE_ALIGN (TREE_TYPE (target));
7050 }
7051}
7052
10f307d9 7053\f
dc183975 7054/* Given an rtx VALUE that may contain additions and multiplications, return
7055 an equivalent value that just refers to a register, memory, or constant.
7056 This is done by generating instructions to perform the arithmetic and
7057 returning a pseudo-register containing the value.
c4f1a887 7058
7059 The returned value may be a REG, SUBREG, MEM or constant. */
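/* Illustrative note, not part of the original sources: given VALUE of the
   form (plus (reg X) (const_int 4)), this emits the addition and returns a
   pseudo register (or TARGET) holding the result, so the caller is left
   with a simple operand rather than an arithmetic expression.  */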
10f307d9 7060
7061rtx
35cb5232 7062force_operand (rtx value, rtx target)
10f307d9 7063{
fef8467d 7064 rtx op1, op2;
10f307d9 7065 /* Use subtarget as the target for operand 0 of a binary operation. */
19cb6b50 7066 rtx subtarget = get_subtarget (target);
fef8467d 7067 enum rtx_code code = GET_CODE (value);
10f307d9 7068
f9cce2dc 7069 /* Check for subreg applied to an expression produced by loop optimizer. */
7070 if (code == SUBREG
8ad4c111 7071 && !REG_P (SUBREG_REG (value))
e16ceb8e 7072 && !MEM_P (SUBREG_REG (value)))
f9cce2dc 7073 {
4631d202 7074 value
7075 = simplify_gen_subreg (GET_MODE (value),
7076 force_reg (GET_MODE (SUBREG_REG (value)),
7077 force_operand (SUBREG_REG (value),
7078 NULL_RTX)),
7079 GET_MODE (SUBREG_REG (value)),
7080 SUBREG_BYTE (value));
f9cce2dc 7081 code = GET_CODE (value);
7082 }
7083
8b59469a 7084 /* Check for a PIC address load. */
fef8467d 7085 if ((code == PLUS || code == MINUS)
8b59469a 7086 && XEXP (value, 0) == pic_offset_table_rtx
7087 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7088 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7089 || GET_CODE (XEXP (value, 1)) == CONST))
7090 {
7091 if (!subtarget)
7092 subtarget = gen_reg_rtx (GET_MODE (value));
7093 emit_move_insn (subtarget, value);
7094 return subtarget;
7095 }
7096
6720e96c 7097 if (ARITHMETIC_P (value))
10f307d9 7098 {
7099 op2 = XEXP (value, 1);
8ad4c111 7100 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
10f307d9 7101 subtarget = 0;
971ba038 7102 if (code == MINUS && CONST_INT_P (op2))
10f307d9 7103 {
fef8467d 7104 code = PLUS;
10f307d9 7105 op2 = negate_rtx (GET_MODE (value), op2);
7106 }
7107
7108 /* Check for an addition with OP2 a constant integer and our first
fef8467d 7109 operand a PLUS of a virtual register and something else. In that
7110 case, we want to emit the sum of the virtual register and the
7111 constant first and then add the other value. This allows virtual
7112 register instantiation to simply modify the constant rather than
7113 creating another one around this addition. */
971ba038 7114 if (code == PLUS && CONST_INT_P (op2)
10f307d9 7115 && GET_CODE (XEXP (value, 0)) == PLUS
8ad4c111 7116 && REG_P (XEXP (XEXP (value, 0), 0))
10f307d9 7117 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7118 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7119 {
fef8467d 7120 rtx temp = expand_simple_binop (GET_MODE (value), code,
7121 XEXP (XEXP (value, 0), 0), op2,
7122 subtarget, 0, OPTAB_LIB_WIDEN);
7123 return expand_simple_binop (GET_MODE (value), code, temp,
7124 force_operand (XEXP (XEXP (value,
7125 0), 1), 0),
7126 target, 0, OPTAB_LIB_WIDEN);
10f307d9 7127 }
fa56dc1d 7128
fef8467d 7129 op1 = force_operand (XEXP (value, 0), subtarget);
7130 op2 = force_operand (op2, NULL_RTX);
7131 switch (code)
7132 {
7133 case MULT:
7134 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7135 case DIV:
7136 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7137 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7138 target, 1, OPTAB_LIB_WIDEN);
7139 else
7140 return expand_divmod (0,
7141 FLOAT_MODE_P (GET_MODE (value))
7142 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7143 GET_MODE (value), op1, op2, target, 0);
fef8467d 7144 case MOD:
7145 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7146 target, 0);
fef8467d 7147 case UDIV:
7148 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7149 target, 1);
fef8467d 7150 case UMOD:
7151 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7152 target, 1);
fef8467d 7153 case ASHIFTRT:
7154 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7155 target, 0, OPTAB_LIB_WIDEN);
fef8467d 7156 default:
7157 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7158 target, 1, OPTAB_LIB_WIDEN);
7159 }
7160 }
6720e96c 7161 if (UNARY_P (value))
fef8467d 7162 {
c0427b5d 7163 if (!target)
7164 target = gen_reg_rtx (GET_MODE (value));
fef8467d 7165 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6184cd69 7166 switch (code)
7167 {
c0427b5d 7168 case ZERO_EXTEND:
7169 case SIGN_EXTEND:
6184cd69 7170 case TRUNCATE:
e40df2f5 7171 case FLOAT_EXTEND:
7172 case FLOAT_TRUNCATE:
c0427b5d 7173 convert_move (target, op1, code == ZERO_EXTEND);
7174 return target;
7175
7176 case FIX:
7177 case UNSIGNED_FIX:
7178 expand_fix (target, op1, code == UNSIGNED_FIX);
7179 return target;
7180
7181 case FLOAT:
7182 case UNSIGNED_FLOAT:
7183 expand_float (target, op1, code == UNSIGNED_FLOAT);
7184 return target;
7185
6184cd69 7186 default:
7187 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7188 }
10f307d9 7189 }
3084721c 7190
7191#ifdef INSN_SCHEDULING
 7192  /* On machines that have insn scheduling, we want all memory references to be
7193 explicit, so we need to deal with such paradoxical SUBREGs. */
b537bfdb 7194 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
3084721c 7195 value
7196 = simplify_gen_subreg (GET_MODE (value),
7197 force_reg (GET_MODE (SUBREG_REG (value)),
7198 force_operand (SUBREG_REG (value),
7199 NULL_RTX)),
7200 GET_MODE (SUBREG_REG (value)),
7201 SUBREG_BYTE (value));
7202#endif
7203
10f307d9 7204 return value;
7205}
7206\f
10f307d9 7207/* Subroutine of expand_expr: return nonzero iff there is no way that
997d68fe 7208 EXP can reference X, which is being modified. TOP_P is nonzero if this
7209 call is going to be used to determine whether we need a temporary
67e40adc 7210 for EXP, as opposed to a recursive call to this function.
7211
7212 It is always safe for this routine to return zero since it merely
7213 searches for optimization opportunities. */
10f307d9 7214
e41f0d80 7215int
1f1872fd 7216safe_from_p (const_rtx x, tree exp, int top_p)
10f307d9 7217{
7218 rtx exp_rtl = 0;
7219 int i, nops;
7220
a71ba0b1 7221 if (x == 0
7222 /* If EXP has varying size, we MUST use a target since we currently
62d8c952 7223 have no way of allocating temporaries of variable size
7224 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7225 So we assume here that something at a higher level has prevented a
b9438b95 7226 clash. This is somewhat bogus, but the best we can do. Only
997d68fe 7227 do this when X is BLKmode and when we are at the top level. */
4b72716d 7228 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
b9438b95 7229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
62d8c952 7230 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7231 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7232 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7233 != INTEGER_CST)
387bc205 7234 && GET_MODE (x) == BLKmode)
7235 /* If X is in the outgoing argument area, it is always safe. */
e16ceb8e 7236 || (MEM_P (x)
387bc205 7237 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7238 || (GET_CODE (XEXP (x, 0)) == PLUS
7239 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
10f307d9 7240 return 1;
7241
7242 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7243 find the underlying pseudo. */
7244 if (GET_CODE (x) == SUBREG)
7245 {
7246 x = SUBREG_REG (x);
8ad4c111 7247 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
10f307d9 7248 return 0;
7249 }
7250
387bc205 7251 /* Now look at our tree code and possibly recurse. */
10f307d9 7252 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7253 {
ce45a448 7254 case tcc_declaration:
6db2b7ab 7255 exp_rtl = DECL_RTL_IF_SET (exp);
10f307d9 7256 break;
7257
ce45a448 7258 case tcc_constant:
10f307d9 7259 return 1;
7260
ce45a448 7261 case tcc_exceptional:
10f307d9 7262 if (TREE_CODE (exp) == TREE_LIST)
56c7ac50 7263 {
7264 while (1)
7265 {
7266 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7267 return 0;
7268 exp = TREE_CHAIN (exp);
7269 if (!exp)
7270 return 1;
7271 if (TREE_CODE (exp) != TREE_LIST)
7272 return safe_from_p (x, exp, 0);
7273 }
7274 }
a5b684d2 7275 else if (TREE_CODE (exp) == CONSTRUCTOR)
7276 {
7277 constructor_elt *ce;
7278 unsigned HOST_WIDE_INT idx;
7279
f1f41a6c 7280 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
a5b684d2 7281 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7282 || !safe_from_p (x, ce->value, 0))
7283 return 0;
7284 return 1;
7285 }
67e40adc 7286 else if (TREE_CODE (exp) == ERROR_MARK)
7287 return 1; /* An already-visited SAVE_EXPR? */
10f307d9 7288 else
7289 return 0;
7290
ce45a448 7291 case tcc_statement:
7dd37241 7292 /* The only case we look at here is the DECL_INITIAL inside a
7293 DECL_EXPR. */
7294 return (TREE_CODE (exp) != DECL_EXPR
7295 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7296 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7297 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7298
ce45a448 7299 case tcc_binary:
7300 case tcc_comparison:
56c7ac50 7301 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7302 return 0;
d632b59a 7303 /* Fall through. */
56c7ac50 7304
ce45a448 7305 case tcc_unary:
56c7ac50 7306 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 7307
ce45a448 7308 case tcc_expression:
7309 case tcc_reference:
c2f47e15 7310 case tcc_vl_exp:
10f307d9 7311 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7312 the expression. If it is set, we conflict iff we are that rtx or
7313 both are in memory. Otherwise, we check all operands of the
7314 expression recursively. */
7315
7316 switch (TREE_CODE (exp))
7317 {
7318 case ADDR_EXPR:
86ce88aa 7319 /* If the operand is static or we are static, we can't conflict.
7320 Likewise if we don't conflict with the operand at all. */
7321 if (staticp (TREE_OPERAND (exp, 0))
7322 || TREE_STATIC (exp)
7323 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7324 return 1;
7325
7326 /* Otherwise, the only way this can conflict is if we are taking
 7327	 the address of a DECL whose address is part of X, which is
7328 very rare. */
7329 exp = TREE_OPERAND (exp, 0);
7330 if (DECL_P (exp))
7331 {
7332 if (!DECL_RTL_SET_P (exp)
e16ceb8e 7333 || !MEM_P (DECL_RTL (exp)))
86ce88aa 7334 return 0;
7335 else
7336 exp_rtl = XEXP (DECL_RTL (exp), 0);
7337 }
7338 break;
10f307d9 7339
5d9de213 7340 case MEM_REF:
e16ceb8e 7341 if (MEM_P (x)
387bc205 7342 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7343 get_alias_set (exp)))
10f307d9 7344 return 0;
7345 break;
7346
7347 case CALL_EXPR:
bc33ff05 7348 /* Assume that the call will clobber all hard registers and
7349 all of memory. */
8ad4c111 7350 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
e16ceb8e 7351 || MEM_P (x))
bc33ff05 7352 return 0;
10f307d9 7353 break;
7354
10f307d9 7355 case WITH_CLEANUP_EXPR:
34e2ddcd 7356 case CLEANUP_POINT_EXPR:
6388f9f7 7357 /* Lowered by gimplify.c. */
611234b4 7358 gcc_unreachable ();
6388f9f7 7359
10f307d9 7360 case SAVE_EXPR:
67c155cb 7361 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 7362
0dbd1c74 7363 default:
7364 break;
10f307d9 7365 }
7366
7367 /* If we have an rtx, we do not need to scan our operands. */
7368 if (exp_rtl)
7369 break;
7370
c2f47e15 7371 nops = TREE_OPERAND_LENGTH (exp);
10f307d9 7372 for (i = 0; i < nops; i++)
7373 if (TREE_OPERAND (exp, i) != 0
997d68fe 7374 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
10f307d9 7375 return 0;
e41f0d80 7376
ce45a448 7377 break;
7378
7379 case tcc_type:
7380 /* Should never get a type here. */
7381 gcc_unreachable ();
10f307d9 7382 }
7383
7384 /* If we have an rtl, find any enclosed object. Then see if we conflict
7385 with it. */
7386 if (exp_rtl)
7387 {
7388 if (GET_CODE (exp_rtl) == SUBREG)
7389 {
7390 exp_rtl = SUBREG_REG (exp_rtl);
8ad4c111 7391 if (REG_P (exp_rtl)
10f307d9 7392 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7393 return 0;
7394 }
7395
7396 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
387bc205 7397 are memory and they conflict. */
10f307d9 7398 return ! (rtx_equal_p (x, exp_rtl)
e16ceb8e 7399 || (MEM_P (x) && MEM_P (exp_rtl)
376a287d 7400 && true_dependence (exp_rtl, VOIDmode, x)));
10f307d9 7401 }
7402
7403 /* If we reach here, it is safe. */
7404 return 1;
7405}
7406
155b05dc 7407\f
fcdc122e 7408/* Return the highest power of two that EXP is known to be a multiple of.
7409 This is used in updating alignment of MEMs in array references. */
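/* Illustrative note, not part of the original sources: for EXP of the form
   I * 8, tree_ctz knows of at least three trailing zero bits, so the value
   returned here would be 8 on a typical target; the result is always capped
   at BIGGEST_ALIGNMENT.  */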
7410
516849c7 7411unsigned HOST_WIDE_INT
b7bf20db 7412highest_pow2_factor (const_tree exp)
fcdc122e 7413{
c8a2b4ff 7414 unsigned HOST_WIDE_INT ret;
7415 int trailing_zeros = tree_ctz (exp);
7416 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7417 return BIGGEST_ALIGNMENT;
7418 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7419 if (ret > BIGGEST_ALIGNMENT)
7420 return BIGGEST_ALIGNMENT;
7421 return ret;
fcdc122e 7422}
5b965633 7423
252d0e4d 7424/* Similar, except that the alignment requirements of TARGET are
7425 taken into account. Assume it is at least as aligned as its
7426 type, unless it is a COMPONENT_REF in which case the layout of
7427 the structure gives the alignment. */
5b965633 7428
84130727 7429static unsigned HOST_WIDE_INT
b7bf20db 7430highest_pow2_factor_for_target (const_tree target, const_tree exp)
5b965633 7431{
6cbeacbb 7432 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7433 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
48e1416a 7434
6cbeacbb 7435 return MAX (factor, talign);
5b965633 7436}
fcdc122e 7437\f
9d75589a 7438/* Convert the tree comparison code TCODE to the rtl one where the
c909ed33 7439 signedness is UNSIGNEDP. */
7440
7441static enum rtx_code
7442convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7443{
7444 enum rtx_code code;
7445 switch (tcode)
7446 {
7447 case EQ_EXPR:
7448 code = EQ;
7449 break;
7450 case NE_EXPR:
7451 code = NE;
7452 break;
7453 case LT_EXPR:
7454 code = unsignedp ? LTU : LT;
7455 break;
7456 case LE_EXPR:
7457 code = unsignedp ? LEU : LE;
7458 break;
7459 case GT_EXPR:
7460 code = unsignedp ? GTU : GT;
7461 break;
7462 case GE_EXPR:
7463 code = unsignedp ? GEU : GE;
7464 break;
7465 case UNORDERED_EXPR:
7466 code = UNORDERED;
7467 break;
7468 case ORDERED_EXPR:
7469 code = ORDERED;
7470 break;
7471 case UNLT_EXPR:
7472 code = UNLT;
7473 break;
7474 case UNLE_EXPR:
7475 code = UNLE;
7476 break;
7477 case UNGT_EXPR:
7478 code = UNGT;
7479 break;
7480 case UNGE_EXPR:
7481 code = UNGE;
7482 break;
7483 case UNEQ_EXPR:
7484 code = UNEQ;
7485 break;
7486 case LTGT_EXPR:
7487 code = LTGT;
7488 break;
7489
7490 default:
7491 gcc_unreachable ();
7492 }
7493 return code;
7494}
7495
33204670 7496/* Subroutine of expand_expr. Expand the two operands of a binary
7497 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7498 The value may be stored in TARGET if TARGET is nonzero. The
7499 MODIFIER argument is as documented by expand_expr. */
7500
01ee997b 7501void
33204670 7502expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7503 enum expand_modifier modifier)
7504{
7505 if (! safe_from_p (target, exp1, 1))
7506 target = 0;
7507 if (operand_equal_p (exp0, exp1, 0))
7508 {
7509 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7510 *op1 = copy_rtx (*op0);
7511 }
7512 else
7513 {
3541e113 7514 /* If we need to preserve evaluation order, copy exp0 into its own
7515 temporary variable so that it can't be clobbered by exp1. */
7516 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7517 exp0 = save_expr (exp0);
33204670 7518 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7519 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7520 }
7521}
7522
c3a9c149 7523\f
334ec2d8 7524/* Return a MEM that contains constant EXP. DEFER is as for
f2d0e9f1 7525 output_constant_def and MODIFIER is as for expand_expr. */
7526
7527static rtx
7528expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7529{
7530 rtx mem;
7531
7532 mem = output_constant_def (exp, defer);
7533 if (modifier != EXPAND_INITIALIZER)
7534 mem = use_anchored_address (mem);
7535 return mem;
7536}
7537
b51e4016 7538/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
ec1e52d1 7539 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7540
7541static rtx
3754d046 7542expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
98155838 7543 enum expand_modifier modifier, addr_space_t as)
ec1e52d1 7544{
7545 rtx result, subtarget;
7546 tree inner, offset;
7547 HOST_WIDE_INT bitsize, bitpos;
7548 int volatilep, unsignedp;
3754d046 7549 machine_mode mode1;
ec1e52d1 7550
7551 /* If we are taking the address of a constant and are at the top level,
7552 we have to use output_constant_def since we can't call force_const_mem
7553 at top level. */
7554 /* ??? This should be considered a front-end bug. We should not be
7555 generating ADDR_EXPR of something that isn't an LVALUE. The only
7556 exception here is STRING_CST. */
e54c9818 7557 if (CONSTANT_CLASS_P (exp))
792729b8 7558 {
7559 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7560 if (modifier < EXPAND_SUM)
7561 result = force_operand (result, target);
7562 return result;
7563 }
ec1e52d1 7564
7565 /* Everything must be something allowed by is_gimple_addressable. */
7566 switch (TREE_CODE (exp))
7567 {
7568 case INDIRECT_REF:
7569 /* This case will happen via recursion for &a->b. */
f2d0e9f1 7570 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
ec1e52d1 7571
182cf5a9 7572 case MEM_REF:
7573 {
7574 tree tem = TREE_OPERAND (exp, 0);
7575 if (!integer_zerop (TREE_OPERAND (exp, 1)))
a0553bff 7576 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
182cf5a9 7577 return expand_expr (tem, target, tmode, modifier);
7578 }
7579
ec1e52d1 7580 case CONST_DECL:
c5075621 7581 /* Expand the initializer like constants above. */
792729b8 7582 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7583 0, modifier), 0);
7584 if (modifier < EXPAND_SUM)
7585 result = force_operand (result, target);
7586 return result;
ec1e52d1 7587
7588 case REALPART_EXPR:
7589 /* The real part of the complex number is always first, therefore
7590 the address is the same as the address of the parent object. */
7591 offset = 0;
7592 bitpos = 0;
7593 inner = TREE_OPERAND (exp, 0);
7594 break;
7595
7596 case IMAGPART_EXPR:
7597 /* The imaginary part of the complex number is always second.
91275768 7598 The expression is therefore always offset by the size of the
ec1e52d1 7599 scalar type. */
7600 offset = 0;
7601 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7602 inner = TREE_OPERAND (exp, 0);
7603 break;
7604
03404fe6 7605 case COMPOUND_LITERAL_EXPR:
50f9371e 7606 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7607 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7608 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7609 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7610 the initializers aren't gimplified. */
7611 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7612 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
03404fe6 7613 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7614 target, tmode, modifier, as);
7615 /* FALLTHRU */
ec1e52d1 7616 default:
7617 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7618 expand_expr, as that can have various side effects; LABEL_DECLs for
e54c9818 7619 example, may not have their DECL_RTL set yet. Expand the rtl of
7620 CONSTRUCTORs too, which should yield a memory reference for the
7621 constructor's contents. Assume language specific tree nodes can
7622 be expanded in some interesting way. */
862f468c 7623 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
ec1e52d1 7624 if (DECL_P (exp)
e54c9818 7625 || TREE_CODE (exp) == CONSTRUCTOR
862f468c 7626 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
ec1e52d1 7627 {
7628 result = expand_expr (exp, target, tmode,
7629 modifier == EXPAND_INITIALIZER
7630 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7631
7632 /* If the DECL isn't in memory, then the DECL wasn't properly
7633 marked TREE_ADDRESSABLE, which will be either a front-end
7634 or a tree optimizer bug. */
5b9418fd 7635
7636 if (TREE_ADDRESSABLE (exp)
7637 && ! MEM_P (result)
9af5ce0c 7638 && ! targetm.calls.allocate_stack_slots_for_args ())
5b9418fd 7639 {
7640 error ("local frame unavailable (naked function?)");
7641 return result;
7642 }
7643 else
7644 gcc_assert (MEM_P (result));
ec1e52d1 7645 result = XEXP (result, 0);
7646
7647 /* ??? Is this needed anymore? */
ea259bbe 7648 if (DECL_P (exp))
7649 TREE_USED (exp) = 1;
ec1e52d1 7650
7651 if (modifier != EXPAND_INITIALIZER
41628de0 7652 && modifier != EXPAND_CONST_ADDRESS
7653 && modifier != EXPAND_SUM)
ec1e52d1 7654 result = force_operand (result, target);
7655 return result;
7656 }
7657
e7e9416e 7658 /* Pass FALSE as the last argument to get_inner_reference although
7659 we are expanding to RTL. The rationale is that we know how to
7660 handle "aligning nodes" here: we can just bypass them because
7661 they won't change the final object whose address will be returned
7662 (they actually exist only for that purpose). */
ec1e52d1 7663 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
e7e9416e 7664 &mode1, &unsignedp, &volatilep, false);
ec1e52d1 7665 break;
7666 }
7667
7668 /* We must have made progress. */
611234b4 7669 gcc_assert (inner != exp);
ec1e52d1 7670
7671 subtarget = offset || bitpos ? NULL_RTX : target;
41727a57 7672 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7673 inner alignment, force the inner to be sufficiently aligned. */
7674 if (CONSTANT_CLASS_P (inner)
7675 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7676 {
7677 inner = copy_node (inner);
7678 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7679 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7680 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7681 }
98155838 7682 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
ec1e52d1 7683
ec1e52d1 7684 if (offset)
7685 {
7686 rtx tmp;
7687
7688 if (modifier != EXPAND_NORMAL)
7689 result = force_operand (result, NULL);
48e1416a 7690 tmp = expand_expr (offset, NULL_RTX, tmode,
af391a06 7691 modifier == EXPAND_INITIALIZER
7692 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
ec1e52d1 7693
1c634092 7694 /* expand_expr is allowed to return an object in a mode other
7695 than TMODE. If it did, we need to convert. */
7696 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7697 tmp = convert_modes (tmode, GET_MODE (tmp),
7698 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
98155838 7699 result = convert_memory_address_addr_space (tmode, result, as);
7700 tmp = convert_memory_address_addr_space (tmode, tmp, as);
07f6ff58 7701
3286ab0c 7702 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88ff2f0d 7703 result = simplify_gen_binary (PLUS, tmode, result, tmp);
ec1e52d1 7704 else
7705 {
7706 subtarget = bitpos ? NULL_RTX : target;
7707 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7708 1, OPTAB_LIB_WIDEN);
7709 }
7710 }
7711
7712 if (bitpos)
7713 {
7714 /* Someone beforehand should have rejected taking the address
7715 of such an object. */
07f6ff58 7716 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
ec1e52d1 7717
fe265396 7718 result = convert_memory_address_addr_space (tmode, result, as);
29c05e22 7719 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
ec1e52d1 7720 if (modifier < EXPAND_SUM)
7721 result = force_operand (result, target);
7722 }
7723
7724 return result;
7725}
7726
b51e4016 7727/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7728 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7729
7730static rtx
3754d046 7731expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
b51e4016 7732 enum expand_modifier modifier)
7733{
98155838 7734 addr_space_t as = ADDR_SPACE_GENERIC;
3754d046 7735 machine_mode address_mode = Pmode;
7736 machine_mode pointer_mode = ptr_mode;
7737 machine_mode rmode;
b51e4016 7738 rtx result;
7739
07f6ff58 7740 /* Target mode of VOIDmode says "whatever's natural". */
7741 if (tmode == VOIDmode)
7742 tmode = TYPE_MODE (TREE_TYPE (exp));
7743
98155838 7744 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7745 {
7746 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7747 address_mode = targetm.addr_space.address_mode (as);
7748 pointer_mode = targetm.addr_space.pointer_mode (as);
7749 }
7750
07f6ff58 7751 /* We can get called with some Weird Things if the user does silliness
7752 like "(short) &a". In that case, convert_memory_address won't do
7753 the right thing, so ignore the given target mode. */
98155838 7754 if (tmode != address_mode && tmode != pointer_mode)
7755 tmode = address_mode;
07f6ff58 7756
b51e4016 7757 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
98155838 7758 tmode, modifier, as);
b51e4016 7759
7760 /* Despite expand_expr claims concerning ignoring TMODE when not
07f6ff58 7761 strictly convenient, stuff breaks if we don't honor it. Note
7762 that combined with the above, we only do this for pointer modes. */
b51e4016 7763 rmode = GET_MODE (result);
7764 if (rmode == VOIDmode)
7765 rmode = tmode;
7766 if (rmode != tmode)
98155838 7767 result = convert_memory_address_addr_space (tmode, result, as);
07f6ff58 7768
b51e4016 7769 return result;
7770}
7771
d4cf8ff7 7772/* Generate code for computing CONSTRUCTOR EXP.
7773 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7774 is TRUE, instead of creating a temporary variable in memory
7775 NULL is returned and the caller needs to handle it differently. */
7776
7777static rtx
7778expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7779 bool avoid_temp_mem)
7780{
7781 tree type = TREE_TYPE (exp);
3754d046 7782 machine_mode mode = TYPE_MODE (type);
d4cf8ff7 7783
7784 /* Try to avoid creating a temporary at all. This is possible
7785 if all of the initializer is zero.
7786 FIXME: try to handle all [0..255] initializers we can handle
7787 with memset. */
7788 if (TREE_STATIC (exp)
7789 && !TREE_ADDRESSABLE (exp)
7790 && target != 0 && mode == BLKmode
7791 && all_zeros_p (exp))
7792 {
7793 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7794 return target;
7795 }
7796
7797 /* All elts simple constants => refer to a constant in memory. But
7798 if this is a non-BLKmode mode, let it store a field at a time
e913b5cd 7799 since that should make a CONST_INT, CONST_WIDE_INT or
7800 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7801 use, it is best to store directly into the target unless the type
7802 is large enough that memcpy will be used. If we are making an
7803 initializer and all operands are constant, put it in memory as
7804 well.
d4cf8ff7 7805
7806 FIXME: Avoid trying to fill vector constructors piece-meal.
7807 Output them with output_constant_def below unless we're sure
7808 they're zeros. This should go away when vector initializers
7809 are treated like VECTOR_CST instead of arrays. */
7810 if ((TREE_STATIC (exp)
7811 && ((mode == BLKmode
7812 && ! (target != 0 && safe_from_p (target, exp, 1)))
7813 || TREE_ADDRESSABLE (exp)
e913b5cd 7814 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
d4bd0e64 7815 && (! can_move_by_pieces
e913b5cd 7816 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
d4cf8ff7 7817 TYPE_ALIGN (type)))
7818 && ! mostly_zeros_p (exp))))
7819 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7820 && TREE_CONSTANT (exp)))
7821 {
7822 rtx constructor;
7823
7824 if (avoid_temp_mem)
7825 return NULL_RTX;
7826
7827 constructor = expand_expr_constant (exp, 1, modifier);
7828
7829 if (modifier != EXPAND_CONST_ADDRESS
7830 && modifier != EXPAND_INITIALIZER
7831 && modifier != EXPAND_SUM)
7832 constructor = validize_mem (constructor);
7833
7834 return constructor;
7835 }
7836
7837 /* Handle calls that pass values in multiple non-contiguous
7838 locations. The Irix 6 ABI has examples of this. */
7839 if (target == 0 || ! safe_from_p (target, exp, 1)
7840 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7841 {
7842 if (avoid_temp_mem)
7843 return NULL_RTX;
7844
9f495e8d 7845 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
d4cf8ff7 7846 }
7847
7848 store_constructor (exp, target, 0, int_expr_size (exp));
7849 return target;
7850}
7851
b51e4016 7852
10f307d9 7853/* expand_expr: generate code for computing expression EXP.
7854 An rtx for the computed value is returned. The value is never null.
7855 In the case of a void EXP, const0_rtx is returned.
7856
7857 The value may be stored in TARGET if TARGET is nonzero.
7858 TARGET is just a suggestion; callers must assume that
7859 the rtx returned may not be the same as TARGET.
7860
7861 If TARGET is CONST0_RTX, it means that the value will be ignored.
7862
7863 If TMODE is not VOIDmode, it suggests generating the
7864 result in mode TMODE. But this is done only when convenient.
7865 Otherwise, TMODE is ignored and the value generated in its natural mode.
7866 TMODE is just a suggestion; callers must assume that
7867 the rtx returned may not have mode TMODE.
7868
d2ae1b1e 7869 Note that TARGET may have neither TMODE nor MODE. In that case, it
7870 probably will not be used.
10f307d9 7871
7872 If MODIFIER is EXPAND_SUM then when EXP is an addition
7873 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7874 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7875 products as above, or REG or MEM, or constant.
7876 Ordinarily in such cases we would output mul or add instructions
7877 and then return a pseudo reg containing the sum.
7878
7879 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7880 it also marks a label as absolutely required (it can't be dead).
1aaabd2e 7881 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d2ae1b1e 7882 This is used for outputting expressions used in initializers.
7883
7884 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7885 with a constant address even if that address is not normally legitimate.
a35a63ff 7886 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7887
7888 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7889 a call parameter. Such targets require special care as we haven't yet
7890 marked TARGET so that it's safe from being trashed by libcalls. We
7891 don't want to use TARGET for anything but the final result;
7892 Intermediate values must go elsewhere. Additionally, calls to
491e04ef 7893 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
60ffaf4d 7894
7895 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7896 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7897 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7898 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
a12f023f 7899 recursively.
7900
7901 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7902 In this case, we don't adjust a returned MEM rtx that wouldn't be
7903 sufficiently aligned for its mode; instead, it's up to the caller
7904 to deal with it afterwards. This is used to make sure that unaligned
7905 base objects for which out-of-bounds accesses are supported, for
7906 example record types with trailing arrays, aren't realigned behind
7907 the back of the caller.
7908 The normal operating mode is to pass FALSE for this parameter. */
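/* As an illustration (not part of the original interface description),
   a caller with no preferred target or mode would typically invoke this
   roughly as

     rtx val = expand_expr_real (exp, NULL_RTX, VOIDmode,
                                 EXPAND_NORMAL, NULL, false);

   where NULL_RTX and VOIDmode leave the choice of target and mode to the
   expander; most callers go through the expand_expr and expand_normal
   convenience wrappers instead of calling this function directly.  */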
10f307d9 7909
7910rtx
3754d046 7911expand_expr_real (tree exp, rtx target, machine_mode tmode,
a12f023f 7912 enum expand_modifier modifier, rtx *alt_rtl,
7913 bool inner_reference_p)
4ee9c684 7914{
8cee8dc0 7915 rtx ret;
4ee9c684 7916
7917 /* Handle ERROR_MARK before anybody tries to access its type. */
7918 if (TREE_CODE (exp) == ERROR_MARK
75a70cf9 7919 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
4ee9c684 7920 {
7921 ret = CONST0_RTX (tmode);
7922 return ret ? ret : const0_rtx;
7923 }
7924
a12f023f 7925 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7926 inner_reference_p);
4ee9c684 7927 return ret;
7928}
7929
c909ed33 7930/* Try to expand the conditional expression which is represented by
 7931 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If this succeeds,
 7932 return the RTL register which represents the result. Otherwise return
 7933 NULL_RTX. */
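/* Illustrative example only: for a GIMPLE assignment such as
   x_1 = a_2 < b_3 ? c_4 : d_5 this routine tries to emit a single
   conditional-move sequence, conceptually

     (set (reg temp) (if_then_else (lt (reg a) (reg b)) (reg c) (reg d)))

   instead of the compare-and-branch code used as a fallback.  */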
7934
7935static rtx
7936expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7937 tree treeop1 ATTRIBUTE_UNUSED,
7938 tree treeop2 ATTRIBUTE_UNUSED)
7939{
c909ed33 7940 rtx insn;
7941 rtx op00, op01, op1, op2;
7942 enum rtx_code comparison_code;
3754d046 7943 machine_mode comparison_mode;
c909ed33 7944 gimple srcstmt;
7945 rtx temp;
7946 tree type = TREE_TYPE (treeop1);
7947 int unsignedp = TYPE_UNSIGNED (type);
3754d046 7948 machine_mode mode = TYPE_MODE (type);
7949 machine_mode orig_mode = mode;
c909ed33 7950
c909ed33 7951 /* If we cannot do a conditional move on the mode, try doing it
7952 with the promoted mode. */
7953 if (!can_conditionally_move_p (mode))
631188f5 7954 {
7955 mode = promote_mode (type, mode, &unsignedp);
7956 if (!can_conditionally_move_p (mode))
7957 return NULL_RTX;
7958 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7959 }
7960 else
7961 temp = assign_temp (type, 0, 1);
c909ed33 7962
7963 start_sequence ();
7964 expand_operands (treeop1, treeop2,
7965 temp, &op1, &op2, EXPAND_NORMAL);
7966
7967 if (TREE_CODE (treeop0) == SSA_NAME
7968 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7969 {
7970 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7971 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7972 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7973 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7974 comparison_mode = TYPE_MODE (type);
7975 unsignedp = TYPE_UNSIGNED (type);
7976 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7977 }
21c8a0ab 7978 else if (COMPARISON_CLASS_P (treeop0))
c909ed33 7979 {
7980 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7981 enum tree_code cmpcode = TREE_CODE (treeop0);
7982 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7983 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7984 unsignedp = TYPE_UNSIGNED (type);
7985 comparison_mode = TYPE_MODE (type);
7986 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7987 }
7988 else
7989 {
7990 op00 = expand_normal (treeop0);
7991 op01 = const0_rtx;
7992 comparison_code = NE;
9336ad57 7993 comparison_mode = GET_MODE (op00);
7994 if (comparison_mode == VOIDmode)
7995 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
c909ed33 7996 }
7997
7998 if (GET_MODE (op1) != mode)
7999 op1 = gen_lowpart (mode, op1);
8000
8001 if (GET_MODE (op2) != mode)
8002 op2 = gen_lowpart (mode, op2);
8003
8004 /* Try to emit the conditional move. */
8005 insn = emit_conditional_move (temp, comparison_code,
8006 op00, op01, comparison_mode,
8007 op1, op2, mode,
8008 unsignedp);
8009
8010 /* If we could do the conditional move, emit the sequence,
8011 and return. */
8012 if (insn)
8013 {
1d277a67 8014 rtx_insn *seq = get_insns ();
c909ed33 8015 end_sequence ();
8016 emit_insn (seq);
31c66114 8017 return convert_modes (orig_mode, mode, temp, 0);
c909ed33 8018 }
8019
8020 /* Otherwise discard the sequence and fall back to code with
8021 branches. */
8022 end_sequence ();
c909ed33 8023 return NULL_RTX;
8024}
8025
16c9337c 8026rtx
3754d046 8027expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
dff12ad7 8028 enum expand_modifier modifier)
10f307d9 8029{
dff12ad7 8030 rtx op0, op1, op2, temp;
f9a00e9e 8031 rtx_code_label *lab;
35cc02b5 8032 tree type;
78a8ed03 8033 int unsignedp;
3754d046 8034 machine_mode mode;
dff12ad7 8035 enum tree_code code = ops->code;
10f307d9 8036 optab this_optab;
32b3a273 8037 rtx subtarget, original_target;
8038 int ignore;
dcfc697f 8039 bool reduce_bit_field;
dff12ad7 8040 location_t loc = ops->location;
b9be572e 8041 tree treeop0, treeop1, treeop2;
dcfc697f 8042#define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
4f7f7efd 8043 ? reduce_to_bit_field_precision ((expr), \
8044 target, \
8045 type) \
8046 : (expr))
10f307d9 8047
dff12ad7 8048 type = ops->type;
75a70cf9 8049 mode = TYPE_MODE (type);
8050 unsignedp = TYPE_UNSIGNED (type);
78a8ed03 8051
dff12ad7 8052 treeop0 = ops->op0;
8053 treeop1 = ops->op1;
b9be572e 8054 treeop2 = ops->op2;
dff12ad7 8055
8056 /* We should be called only on simple (binary or unary) expressions,
8057 exactly those that are valid in gimple expressions that aren't
8058 GIMPLE_SINGLE_RHS (or invalid). */
8059 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
00f4f705 8060 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8061 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
588e1cc3 8062
32b3a273 8063 ignore = (target == const0_rtx
d9659041 8064 || ((CONVERT_EXPR_CODE_P (code)
6a2b2394 8065 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
32b3a273 8066 && TREE_CODE (type) == VOID_TYPE));
8067
dff12ad7 8068 /* We should be called only if we need the result. */
8069 gcc_assert (!ignore);
8070
dcfc697f 8071 /* An operation in what may be a bit-field type needs the
8072 result to be reduced to the precision of the bit-field type,
8073 which is narrower than that of the type's mode. */
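  /* For example (illustrative only): arithmetic on a 3-bit bit-field
     type is carried out in a full integer mode such as SImode, and the
     REDUCE_BIT_FIELD wrapper then masks or sign-extends the wide result
     back down to 3 bits of precision.  */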
c3c9a9f3 8074 reduce_bit_field = (INTEGRAL_TYPE_P (type)
dcfc697f 8075 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8076
dcfc697f 8077 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8078 target = 0;
8079
8080 /* Use subtarget as the target for operand 0 of a binary operation. */
8081 subtarget = get_subtarget (target);
8082 original_target = target;
10f307d9 8083
10f307d9 8084 switch (code)
8085 {
e38def9c 8086 case NON_LVALUE_EXPR:
dff12ad7 8087 case PAREN_EXPR:
8088 CASE_CONVERT:
8089 if (treeop0 == error_mark_node)
8090 return const0_rtx;
4ee9c684 8091
dff12ad7 8092 if (TREE_CODE (type) == UNION_TYPE)
8093 {
8094 tree valtype = TREE_TYPE (treeop0);
4ee9c684 8095
dff12ad7 8096 /* If both input and output are BLKmode, this conversion isn't doing
8097 anything except possibly changing memory attribute. */
8098 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8099 {
8100 rtx result = expand_expr (treeop0, target, tmode,
8101 modifier);
10f307d9 8102
dff12ad7 8103 result = copy_rtx (result);
8104 set_mem_attributes (result, type, 0);
8105 return result;
8106 }
dec41e98 8107
dff12ad7 8108 if (target == 0)
8109 {
8110 if (TYPE_MODE (type) != BLKmode)
8111 target = gen_reg_rtx (TYPE_MODE (type));
8112 else
0ab48139 8113 target = assign_temp (type, 1, 1);
dff12ad7 8114 }
eb4b06b6 8115
dff12ad7 8116 if (MEM_P (target))
8117 /* Store data into beginning of memory target. */
8118 store_expr (treeop0,
8119 adjust_address (target, TYPE_MODE (valtype), 0),
8120 modifier == EXPAND_STACK_PARM,
8121 false);
9dda1f80 8122
dff12ad7 8123 else
8124 {
8125 gcc_assert (REG_P (target));
d2ae1b1e 8126
dff12ad7 8127 /* Store this field into a union of the proper type. */
8128 store_field (target,
8129 MIN ((int_size_in_bytes (TREE_TYPE
8130 (treeop0))
8131 * BITS_PER_UNIT),
8132 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
f955ca51 8133 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
dff12ad7 8134 }
d2ae1b1e 8135
dff12ad7 8136 /* Return the entire union. */
8137 return target;
34f17b00 8138 }
8139
dff12ad7 8140 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8141 {
8142 op0 = expand_expr (treeop0, target, VOIDmode,
8143 modifier);
d2ae1b1e 8144
dff12ad7 8145 /* If the signedness of the conversion differs and OP0 is
8146 a promoted SUBREG, clear that indication since we now
8147 have to do the proper extension. */
8148 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8149 && GET_CODE (op0) == SUBREG)
8150 SUBREG_PROMOTED_VAR_P (op0) = 0;
d2ae1b1e 8151
dff12ad7 8152 return REDUCE_BIT_FIELD (op0);
60ffaf4d 8153 }
acfb31e5 8154
dff12ad7 8155 op0 = expand_expr (treeop0, NULL_RTX, mode,
8156 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8157 if (GET_MODE (op0) == mode)
8158 ;
8159
8160 /* If OP0 is a constant, just convert it into the proper mode. */
8161 else if (CONSTANT_P (op0))
6e6b4174 8162 {
dff12ad7 8163 tree inner_type = TREE_TYPE (treeop0);
3754d046 8164 machine_mode inner_mode = GET_MODE (op0);
faa7e9d5 8165
8166 if (inner_mode == VOIDmode)
8167 inner_mode = TYPE_MODE (inner_type);
6e6b4174 8168
dff12ad7 8169 if (modifier == EXPAND_INITIALIZER)
8170 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8171 subreg_lowpart_offset (mode,
8172 inner_mode));
8173 else
 8174	    op0 = convert_modes (mode, inner_mode, op0,
8175 TYPE_UNSIGNED (inner_type));
6e6b4174 8176 }
8177
dff12ad7 8178 else if (modifier == EXPAND_INITIALIZER)
8179 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
acfb31e5 8180
dff12ad7 8181 else if (target == 0)
8182 op0 = convert_to_mode (mode, op0,
8183 TYPE_UNSIGNED (TREE_TYPE
8184 (treeop0)));
8185 else
acfb31e5 8186 {
dff12ad7 8187 convert_move (target, op0,
8188 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8189 op0 = target;
8190 }
1f8b6002 8191
dff12ad7 8192 return REDUCE_BIT_FIELD (op0);
10f307d9 8193
bd1a81f7 8194 case ADDR_SPACE_CONVERT_EXPR:
8195 {
8196 tree treeop0_type = TREE_TYPE (treeop0);
8197 addr_space_t as_to;
8198 addr_space_t as_from;
8199
8200 gcc_assert (POINTER_TYPE_P (type));
8201 gcc_assert (POINTER_TYPE_P (treeop0_type));
8202
8203 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8204 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8205
8206 /* Conversions between pointers to the same address space should
8207 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8208 gcc_assert (as_to != as_from);
8209
8210 /* Ask target code to handle conversion between pointers
8211 to overlapping address spaces. */
8212 if (targetm.addr_space.subset_p (as_to, as_from)
8213 || targetm.addr_space.subset_p (as_from, as_to))
8214 {
8215 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8216 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8217 gcc_assert (op0);
8218 return op0;
8219 }
8220
8221 /* For disjoint address spaces, converting anything but
8222 a null pointer invokes undefined behaviour. We simply
8223 always return a null pointer here. */
8224 return CONST0_RTX (mode);
8225 }
8226
48e1416a 8227 case POINTER_PLUS_EXPR:
dff12ad7 8228    /* Even though the sizetype mode and the pointer's mode can be different,
48e1416a 8229 expand is able to handle this correctly and get the correct result out
dff12ad7 8230 of the PLUS_EXPR code. */
8231 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8232 if sizetype precision is smaller than pointer precision. */
8233 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8234 treeop1 = fold_convert_loc (loc, type,
8235 fold_convert_loc (loc, ssizetype,
8236 treeop1));
450c6e32 8237 /* If sizetype precision is larger than pointer precision, truncate the
8238 offset to have matching modes. */
8239 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8240 treeop1 = fold_convert_loc (loc, type, treeop1);
8241
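      /* Fall through: the POINTER_PLUS_EXPR case is handled by the
	 PLUS_EXPR code below.  */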
dff12ad7 8242 case PLUS_EXPR:
dff12ad7 8243 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8244 something else, make sure we add the register to the constant and
8245 then to the other thing. This case can occur during strength
8246 reduction and doing it this way will produce better code if the
8247 frame pointer or argument pointer is eliminated.
10f307d9 8248
dff12ad7 8249 fold-const.c will ensure that the constant is always in the inner
8250 PLUS_EXPR, so the only case we need to do anything about is if
8251 sp, ap, or fp is our second argument, in which case we must swap
8252 the innermost first argument and our second argument. */
fa56dc1d 8253
dff12ad7 8254 if (TREE_CODE (treeop0) == PLUS_EXPR
8255 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8256 && TREE_CODE (treeop1) == VAR_DECL
8257 && (DECL_RTL (treeop1) == frame_pointer_rtx
8258 || DECL_RTL (treeop1) == stack_pointer_rtx
8259 || DECL_RTL (treeop1) == arg_pointer_rtx))
8260 {
c8010b80 8261 gcc_unreachable ();
dff12ad7 8262 }
10f307d9 8263
dff12ad7 8264 /* If the result is to be ptr_mode and we are adding an integer to
8265 something, we might be forming a constant. So try to use
8266 plus_constant. If it produces a sum and we can't accept it,
8267 use force_operand. This allows P = &ARR[const] to generate
8268 efficient code on machines where a SYMBOL_REF is not a valid
8269 address.
68a556d6 8270
dff12ad7 8271 If this is an EXPAND_SUM call, always return the sum. */
8272 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8273 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
67cae17d 8274 {
dff12ad7 8275 if (modifier == EXPAND_STACK_PARM)
8276 target = 0;
8277 if (TREE_CODE (treeop0) == INTEGER_CST
995b44f5 8278 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
dff12ad7 8279 && TREE_CONSTANT (treeop1))
8280 {
8281 rtx constant_part;
e913b5cd 8282 HOST_WIDE_INT wc;
3754d046 8283 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
67cae17d 8284
dff12ad7 8285 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8286 EXPAND_SUM);
796b6678 8287 /* Use wi::shwi to ensure that the constant is
dff12ad7 8288 truncated according to the mode of OP1, then sign extended
8289 to a HOST_WIDE_INT. Using the constant directly can result
8290 in non-canonical RTL in a 64x32 cross compile. */
f9ae6f95 8291 wc = TREE_INT_CST_LOW (treeop0);
796b6678 8292 constant_part =
8293 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
29c05e22 8294 op1 = plus_constant (mode, op1, INTVAL (constant_part));
dff12ad7 8295 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8296 op1 = force_operand (op1, target);
8297 return REDUCE_BIT_FIELD (op1);
8298 }
67cae17d 8299
dff12ad7 8300 else if (TREE_CODE (treeop1) == INTEGER_CST
995b44f5 8301 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
dff12ad7 8302 && TREE_CONSTANT (treeop0))
8303 {
8304 rtx constant_part;
e913b5cd 8305 HOST_WIDE_INT wc;
3754d046 8306 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
67cae17d 8307
dff12ad7 8308 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8309 (modifier == EXPAND_INITIALIZER
8310 ? EXPAND_INITIALIZER : EXPAND_SUM));
8311 if (! CONSTANT_P (op0))
8312 {
8313 op1 = expand_expr (treeop1, NULL_RTX,
8314 VOIDmode, modifier);
8315 /* Return a PLUS if modifier says it's OK. */
8316 if (modifier == EXPAND_SUM
8317 || modifier == EXPAND_INITIALIZER)
8318 return simplify_gen_binary (PLUS, mode, op0, op1);
8319 goto binop2;
8320 }
796b6678 8321 /* Use wi::shwi to ensure that the constant is
dff12ad7 8322 truncated according to the mode of OP1, then sign extended
8323 to a HOST_WIDE_INT. Using the constant directly can result
8324 in non-canonical RTL in a 64x32 cross compile. */
f9ae6f95 8325 wc = TREE_INT_CST_LOW (treeop1);
796b6678 8326 constant_part
8327 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
29c05e22 8328 op0 = plus_constant (mode, op0, INTVAL (constant_part));
dff12ad7 8329 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 op0 = force_operand (op0, target);
8331 return REDUCE_BIT_FIELD (op0);
8332 }
67cae17d 8333 }
8334
46b155e1 8335 /* Use TER to expand pointer addition of a negated value
8336 as pointer subtraction. */
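      /* I.e. (illustrative) a statement sequence t_3 = -n_4;
	 p_1 = q_2 + t_3; is expanded as p_1 = q_2 - n_4.  */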
8337 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8338 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8339 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8340 && TREE_CODE (treeop1) == SSA_NAME
8341 && TYPE_MODE (TREE_TYPE (treeop0))
8342 == TYPE_MODE (TREE_TYPE (treeop1)))
8343 {
8344 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8345 if (def)
8346 {
8347 treeop1 = gimple_assign_rhs1 (def);
8348 code = MINUS_EXPR;
8349 goto do_minus;
8350 }
8351 }
8352
dff12ad7 8353 /* No sense saving up arithmetic to be done
8354 if it's all in the wrong mode to form part of an address.
8355 And force_operand won't know whether to sign-extend or
8356 zero-extend. */
8357 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8358 || mode != ptr_mode)
8359 {
8360 expand_operands (treeop0, treeop1,
8361 subtarget, &op0, &op1, EXPAND_NORMAL);
8362 if (op0 == const0_rtx)
8363 return op1;
8364 if (op1 == const0_rtx)
8365 return op0;
8366 goto binop2;
8367 }
67cae17d 8368
dff12ad7 8369 expand_operands (treeop0, treeop1,
8370 subtarget, &op0, &op1, modifier);
8371 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
10f307d9 8372
dff12ad7 8373 case MINUS_EXPR:
46b155e1 8374 do_minus:
dff12ad7 8375 /* For initializers, we are allowed to return a MINUS of two
8376 symbolic constants. Here we handle all cases when both operands
8377 are constant. */
8378 /* Handle difference of two symbolic constants,
8379 for the sake of an initializer. */
8380 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8381 && really_constant_p (treeop0)
8382 && really_constant_p (treeop1))
8383 {
8384 expand_operands (treeop0, treeop1,
8385 NULL_RTX, &op0, &op1, modifier);
acfb31e5 8386
dff12ad7 8387 /* If the last operand is a CONST_INT, use plus_constant of
8388 the negated constant. Else make the MINUS. */
8389 if (CONST_INT_P (op1))
29c05e22 8390 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8391 -INTVAL (op1)));
dff12ad7 8392 else
8393 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8394 }
acfb31e5 8395
dff12ad7 8396 /* No sense saving up arithmetic to be done
8397 if it's all in the wrong mode to form part of an address.
8398 And force_operand won't know whether to sign-extend or
8399 zero-extend. */
8400 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8401 || mode != ptr_mode)
8402 goto binop;
10f307d9 8403
dff12ad7 8404 expand_operands (treeop0, treeop1,
8405 subtarget, &op0, &op1, modifier);
c19f64ba 8406
dff12ad7 8407 /* Convert A - const to A + (-const). */
8408 if (CONST_INT_P (op1))
f75fb6ae 8409 {
dff12ad7 8410 op1 = negate_rtx (mode, op1);
8411 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
f75fb6ae 8412 }
603c4ee1 8413
dff12ad7 8414 goto binop2;
fa56dc1d 8415
00f4f705 8416 case WIDEN_MULT_PLUS_EXPR:
8417 case WIDEN_MULT_MINUS_EXPR:
8418 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
b9be572e 8419 op2 = expand_normal (treeop2);
00f4f705 8420 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8421 target, unsignedp);
8422 return target;
8423
62be004c 8424 case WIDEN_MULT_EXPR:
dff12ad7 8425 /* If first operand is constant, swap them.
8426 Thus the following special case checks need only
8427 check the second operand. */
8428 if (TREE_CODE (treeop0) == INTEGER_CST)
8429 {
8430 tree t1 = treeop0;
8431 treeop0 = treeop1;
8432 treeop1 = t1;
8433 }
10f307d9 8434
dff12ad7 8435 /* First, check if we have a multiplication of one signed and one
8436 unsigned operand. */
62be004c 8437 if (TREE_CODE (treeop1) != INTEGER_CST
8438 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8439 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
dff12ad7 8440 {
3754d046 8441 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
dff12ad7 8442 this_optab = usmul_widen_optab;
aff5fb4d 8443 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8444 != CODE_FOR_nothing)
dff12ad7 8445 {
aff5fb4d 8446 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8447 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8448 EXPAND_NORMAL);
8449 else
8450 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8451 EXPAND_NORMAL);
4620c2de 8452 /* op0 and op1 might still be constant, despite the above
8453 != INTEGER_CST check. Handle it. */
8454 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8455 {
8456 op0 = convert_modes (innermode, mode, op0, true);
8457 op1 = convert_modes (innermode, mode, op1, false);
8458 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8459 target, unsignedp));
8460 }
aff5fb4d 8461 goto binop3;
dff12ad7 8462 }
8463 }
62be004c 8464 /* Check for a multiplication with matching signedness. */
8465 else if ((TREE_CODE (treeop1) == INTEGER_CST
8466 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8467 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8468 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
dff12ad7 8469 {
62be004c 8470 tree op0type = TREE_TYPE (treeop0);
3754d046 8471 machine_mode innermode = TYPE_MODE (op0type);
dff12ad7 8472 bool zextend_p = TYPE_UNSIGNED (op0type);
8473 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8474 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
f1a494f4 8475
aff5fb4d 8476 if (TREE_CODE (treeop0) != INTEGER_CST)
dff12ad7 8477 {
aff5fb4d 8478 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
5a574e8b 8479 != CODE_FOR_nothing)
dff12ad7 8480 {
62be004c 8481 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8482 EXPAND_NORMAL);
4620c2de 8483 /* op0 and op1 might still be constant, despite the above
8484 != INTEGER_CST check. Handle it. */
8485 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8486 {
8487 widen_mult_const:
8488 op0 = convert_modes (innermode, mode, op0, zextend_p);
8489 op1
8490 = convert_modes (innermode, mode, op1,
8491 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8492 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8493 target,
8494 unsignedp));
8495 }
62be004c 8496 temp = expand_widening_mult (mode, op0, op1, target,
8497 unsignedp, this_optab);
8498 return REDUCE_BIT_FIELD (temp);
dff12ad7 8499 }
aff5fb4d 8500 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
5a574e8b 8501 != CODE_FOR_nothing
62be004c 8502 && innermode == word_mode)
dff12ad7 8503 {
8504 rtx htem, hipart;
62be004c 8505 op0 = expand_normal (treeop0);
8506 if (TREE_CODE (treeop1) == INTEGER_CST)
dff12ad7 8507 op1 = convert_modes (innermode, mode,
4620c2de 8508 expand_normal (treeop1),
8509 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
dff12ad7 8510 else
62be004c 8511 op1 = expand_normal (treeop1);
4620c2de 8512 /* op0 and op1 might still be constant, despite the above
8513 != INTEGER_CST check. Handle it. */
8514 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8515 goto widen_mult_const;
dff12ad7 8516 temp = expand_binop (mode, other_optab, op0, op1, target,
8517 unsignedp, OPTAB_LIB_WIDEN);
8518 hipart = gen_highpart (innermode, temp);
8519 htem = expand_mult_highpart_adjust (innermode, hipart,
8520 op0, op1, hipart,
8521 zextend_p);
8522 if (htem != hipart)
8523 emit_move_insn (hipart, htem);
8524 return REDUCE_BIT_FIELD (temp);
8525 }
8526 }
8527 }
62be004c 8528 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8529 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8530 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8531 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8532
b9be572e 8533 case FMA_EXPR:
8534 {
8535 optab opt = fma_optab;
8536 gimple def0, def2;
8537
d325c8f4 8538 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8539 call. */
8540 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8541 {
8542 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8543 tree call_expr;
8544
8545 gcc_assert (fn != NULL_TREE);
8546 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8547 return expand_builtin (call_expr, target, subtarget, mode, false);
8548 }
8549
b9be572e 8550 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
ec4d2c3d 8551 /* The multiplication is commutative - look at its 2nd operand
8552 if the first isn't fed by a negate. */
8553 if (!def0)
8554 {
8555 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8556 /* Swap operands if the 2nd operand is fed by a negate. */
8557 if (def0)
8558 {
8559 tree tem = treeop0;
8560 treeop0 = treeop1;
8561 treeop1 = tem;
8562 }
8563 }
b9be572e 8564 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8565
8566 op0 = op2 = NULL;
8567
8568 if (def0 && def2
8569 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8570 {
8571 opt = fnms_optab;
8572 op0 = expand_normal (gimple_assign_rhs1 (def0));
8573 op2 = expand_normal (gimple_assign_rhs1 (def2));
8574 }
8575 else if (def0
8576 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8577 {
8578 opt = fnma_optab;
8579 op0 = expand_normal (gimple_assign_rhs1 (def0));
8580 }
8581 else if (def2
8582 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8583 {
8584 opt = fms_optab;
8585 op2 = expand_normal (gimple_assign_rhs1 (def2));
8586 }
8587
8588 if (op0 == NULL)
8589 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8590 if (op2 == NULL)
8591 op2 = expand_normal (treeop2);
8592 op1 = expand_normal (treeop1);
8593
8594 return expand_ternary_op (TYPE_MODE (type), opt,
8595 op0, op1, op2, target, 0);
8596 }
8597
62be004c 8598 case MULT_EXPR:
8599 /* If this is a fixed-point operation, then we cannot use the code
8600 below because "expand_mult" doesn't support sat/no-sat fixed-point
8601 multiplications. */
8602 if (ALL_FIXED_POINT_MODE_P (mode))
8603 goto binop;
8604
8605 /* If first operand is constant, swap them.
8606 Thus the following special case checks need only
8607 check the second operand. */
8608 if (TREE_CODE (treeop0) == INTEGER_CST)
8609 {
8610 tree t1 = treeop0;
8611 treeop0 = treeop1;
8612 treeop1 = t1;
8613 }
8614
8615 /* Attempt to return something suitable for generating an
8616 indexed address, for machines that support that. */
8617
8618 if (modifier == EXPAND_SUM && mode == ptr_mode
e913b5cd 8619 && tree_fits_shwi_p (treeop1))
62be004c 8620 {
8621 tree exp1 = treeop1;
8622
8623 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8624 EXPAND_SUM);
8625
8626 if (!REG_P (op0))
8627 op0 = force_operand (op0, NULL_RTX);
8628 if (!REG_P (op0))
8629 op0 = copy_to_mode_reg (mode, op0);
8630
8631 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
fcb97e84 8632 gen_int_mode (tree_to_shwi (exp1),
62be004c 8633 TYPE_MODE (TREE_TYPE (exp1)))));
8634 }
8635
8636 if (modifier == EXPAND_STACK_PARM)
8637 target = 0;
8638
8639 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
dff12ad7 8640 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
10f307d9 8641
dff12ad7 8642 case TRUNC_DIV_EXPR:
8643 case FLOOR_DIV_EXPR:
8644 case CEIL_DIV_EXPR:
8645 case ROUND_DIV_EXPR:
8646 case EXACT_DIV_EXPR:
8647 /* If this is a fixed-point operation, then we cannot use the code
8648 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8649 divisions. */
8650 if (ALL_FIXED_POINT_MODE_P (mode))
8651 goto binop;
aed164c3 8652
dff12ad7 8653 if (modifier == EXPAND_STACK_PARM)
8654 target = 0;
8655 /* Possible optimization: compute the dividend with EXPAND_SUM
8656 then if the divisor is constant can optimize the case
8657 where some terms of the dividend have coeffs divisible by it. */
8658 expand_operands (treeop0, treeop1,
8659 subtarget, &op0, &op1, EXPAND_NORMAL);
8660 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
aed164c3 8661
dff12ad7 8662 case RDIV_EXPR:
8663 goto binop;
4ee9c684 8664
ebf4f764 8665 case MULT_HIGHPART_EXPR:
8666 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8667 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8668 gcc_assert (temp);
8669 return temp;
8670
dff12ad7 8671 case TRUNC_MOD_EXPR:
8672 case FLOOR_MOD_EXPR:
8673 case CEIL_MOD_EXPR:
8674 case ROUND_MOD_EXPR:
8675 if (modifier == EXPAND_STACK_PARM)
8676 target = 0;
8677 expand_operands (treeop0, treeop1,
8678 subtarget, &op0, &op1, EXPAND_NORMAL);
8679 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
cf389750 8680
dff12ad7 8681 case FIXED_CONVERT_EXPR:
8682 op0 = expand_normal (treeop0);
8683 if (target == 0 || modifier == EXPAND_STACK_PARM)
8684 target = gen_reg_rtx (mode);
cf389750 8685
dff12ad7 8686 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8687 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8688 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8689 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8690 else
8691 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8692 return target;
4ee9c684 8693
dff12ad7 8694 case FIX_TRUNC_EXPR:
8695 op0 = expand_normal (treeop0);
8696 if (target == 0 || modifier == EXPAND_STACK_PARM)
8697 target = gen_reg_rtx (mode);
8698 expand_fix (target, op0, unsignedp);
8699 return target;
10f307d9 8700
dff12ad7 8701 case FLOAT_EXPR:
8702 op0 = expand_normal (treeop0);
8703 if (target == 0 || modifier == EXPAND_STACK_PARM)
8704 target = gen_reg_rtx (mode);
8705 /* expand_float can't figure out what to do if FROM has VOIDmode.
8706 So give it the correct mode. With -O, cse will optimize this. */
8707 if (GET_MODE (op0) == VOIDmode)
8708 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8709 op0);
8710 expand_float (target, op0,
8711 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8712 return target;
2ef1e405 8713
dff12ad7 8714 case NEGATE_EXPR:
8715 op0 = expand_expr (treeop0, subtarget,
8716 VOIDmode, EXPAND_NORMAL);
8717 if (modifier == EXPAND_STACK_PARM)
8718 target = 0;
8719 temp = expand_unop (mode,
8720 optab_for_tree_code (NEGATE_EXPR, type,
8721 optab_default),
8722 op0, target, 0);
8723 gcc_assert (temp);
8724 return REDUCE_BIT_FIELD (temp);
a0c2c45b 8725
dff12ad7 8726 case ABS_EXPR:
8727 op0 = expand_expr (treeop0, subtarget,
8728 VOIDmode, EXPAND_NORMAL);
8729 if (modifier == EXPAND_STACK_PARM)
8730 target = 0;
fa56dc1d 8731
dff12ad7 8732 /* ABS_EXPR is not valid for complex arguments. */
8733 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8734 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
cf389750 8735
dff12ad7 8736 /* Unsigned abs is simply the operand. Testing here means we don't
8737 risk generating incorrect code below. */
8738 if (TYPE_UNSIGNED (type))
8739 return op0;
c75b4594 8740
dff12ad7 8741 return expand_abs (mode, op0, target, unsignedp,
8742 safe_from_p (target, treeop0, 1));
d4cf8ff7 8743
dff12ad7 8744 case MAX_EXPR:
8745 case MIN_EXPR:
8746 target = original_target;
8747 if (target == 0
8748 || modifier == EXPAND_STACK_PARM
8749 || (MEM_P (target) && MEM_VOLATILE_P (target))
8750 || GET_MODE (target) != mode
8751 || (REG_P (target)
8752 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8753 target = gen_reg_rtx (mode);
8754 expand_operands (treeop0, treeop1,
8755 target, &op0, &op1, EXPAND_NORMAL);
d4cf8ff7 8756
dff12ad7 8757 /* First try to do it with a special MIN or MAX instruction.
8758 If that does not win, use a conditional jump to select the proper
8759 value. */
8760 this_optab = optab_for_tree_code (code, type, optab_default);
8761 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8762 OPTAB_WIDEN);
8763 if (temp != 0)
8764 return temp;
1f8b6002 8765
dff12ad7 8766 /* At this point, a MEM target is no longer useful; we will get better
8767 code without it. */
1f8b6002 8768
dff12ad7 8769 if (! REG_P (target))
8770 target = gen_reg_rtx (mode);
1f8b6002 8771
dff12ad7 8772 /* If op1 was placed in target, swap op0 and op1. */
8773 if (target != op0 && target == op1)
1d1a557c 8774 std::swap (op0, op1);
1f8b6002 8775
dff12ad7 8776 /* We generate better code and avoid problems with op1 mentioning
8777 target by forcing op1 into a pseudo if it isn't a constant. */
8778 if (! CONSTANT_P (op1))
8779 op1 = force_reg (mode, op1);
2ef1e405 8780
10f307d9 8781 {
dff12ad7 8782 enum rtx_code comparison_code;
8783 rtx cmpop1 = op1;
952f3892 8784
dff12ad7 8785 if (code == MAX_EXPR)
8786 comparison_code = unsignedp ? GEU : GE;
8787 else
8788 comparison_code = unsignedp ? LEU : LE;
952f3892 8789
dff12ad7 8790 /* Canonicalize to comparisons against 0. */
8791 if (op1 == const1_rtx)
5fb2a157 8792 {
dff12ad7 8793 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8794 or (a != 0 ? a : 1) for unsigned.
8795 For MIN we are safe converting (a <= 1 ? a : 1)
8796 into (a <= 0 ? a : 1) */
8797 cmpop1 = const0_rtx;
8798 if (code == MAX_EXPR)
8799 comparison_code = unsignedp ? NE : GT;
5fb2a157 8800 }
dff12ad7 8801 if (op1 == constm1_rtx && !unsignedp)
a7bab26c 8802 {
dff12ad7 8803 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8804 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8805 cmpop1 = const0_rtx;
8806 if (code == MIN_EXPR)
8807 comparison_code = LT;
a7bab26c 8808 }
9630036a 8809
dff12ad7 8810 /* Use a conditional move if possible. */
8811 if (can_conditionally_move_p (mode))
954bdcb1 8812 {
dff12ad7 8813 rtx insn;
954bdcb1 8814
dff12ad7 8815 start_sequence ();
5785f96f 8816
dff12ad7 8817 /* Try to emit the conditional move. */
8818 insn = emit_conditional_move (target, comparison_code,
8819 op0, cmpop1, mode,
8820 op0, op1, mode,
8821 unsignedp);
8822
8823 /* If we could do the conditional move, emit the sequence,
8824 and return. */
8825 if (insn)
25d55d72 8826 {
1d277a67 8827 rtx_insn *seq = get_insns ();
dff12ad7 8828 end_sequence ();
8829 emit_insn (seq);
8830 return target;
25d55d72 8831 }
8832
dff12ad7 8833 /* Otherwise discard the sequence and fall back to code with
8834 branches. */
8835 end_sequence ();
954bdcb1 8836 }
9630036a 8837
dff12ad7 8838 if (target != op0)
8839 emit_move_insn (target, op0);
954bdcb1 8840
f9a00e9e 8841 lab = gen_label_rtx ();
dff12ad7 8842 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
f9a00e9e 8843 unsignedp, mode, NULL_RTX, NULL, lab,
79ab74cc 8844 -1);
dff12ad7 8845 }
8846 emit_move_insn (target, op1);
f9a00e9e 8847 emit_label (lab);
dff12ad7 8848 return target;
67c68e45 8849
dff12ad7 8850 case BIT_NOT_EXPR:
8851 op0 = expand_expr (treeop0, subtarget,
8852 VOIDmode, EXPAND_NORMAL);
8853 if (modifier == EXPAND_STACK_PARM)
8854 target = 0;
f1c44a44 8855      /* In case we have to reduce the result to bitfield precision
7e5608ca 8856	 for an unsigned bitfield, expand this as an XOR with a proper constant
 8857	 instead.  */
8858 if (reduce_bit_field && TYPE_UNSIGNED (type))
e913b5cd 8859 {
796b6678 8860 wide_int mask = wi::mask (TYPE_PRECISION (type),
8861 false, GET_MODE_PRECISION (mode));
e913b5cd 8862
8863 temp = expand_binop (mode, xor_optab, op0,
8864 immed_wide_int_const (mask, mode),
8865 target, 1, OPTAB_LIB_WIDEN);
8866 }
f1c44a44 8867 else
8868 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
dff12ad7 8869 gcc_assert (temp);
8870 return temp;
c3a9c149 8871
dff12ad7 8872 /* ??? Can optimize bitwise operations with one arg constant.
8873 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8874 and (a bitwise1 b) bitwise2 b (etc)
8875 but that is probably not worth while. */
10f307d9 8876
dff12ad7 8877 case BIT_AND_EXPR:
dff12ad7 8878 case BIT_IOR_EXPR:
dff12ad7 8879 case BIT_XOR_EXPR:
8880 goto binop;
0e9fefce 8881
dff12ad7 8882 case LROTATE_EXPR:
8883 case RROTATE_EXPR:
8884 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8885 || (GET_MODE_PRECISION (TYPE_MODE (type))
8886 == TYPE_PRECISION (type)));
8887 /* fall through */
0e20f9fb 8888
dff12ad7 8889 case LSHIFT_EXPR:
8890 case RSHIFT_EXPR:
8891 /* If this is a fixed-point operation, then we cannot use the code
8892 below because "expand_shift" doesn't support sat/no-sat fixed-point
8893 shifts. */
8894 if (ALL_FIXED_POINT_MODE_P (mode))
8895 goto binop;
fa56dc1d 8896
dff12ad7 8897 if (! safe_from_p (subtarget, treeop1, 1))
8898 subtarget = 0;
8899 if (modifier == EXPAND_STACK_PARM)
8900 target = 0;
8901 op0 = expand_expr (treeop0, subtarget,
8902 VOIDmode, EXPAND_NORMAL);
f5ff0b21 8903 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8904 unsignedp);
dff12ad7 8905 if (code == LSHIFT_EXPR)
8906 temp = REDUCE_BIT_FIELD (temp);
8907 return temp;
10f307d9 8908
dff12ad7 8909 /* Could determine the answer when only additive constants differ. Also,
8910 the addition of one can be handled by changing the condition. */
8911 case LT_EXPR:
8912 case LE_EXPR:
8913 case GT_EXPR:
8914 case GE_EXPR:
8915 case EQ_EXPR:
8916 case NE_EXPR:
8917 case UNORDERED_EXPR:
8918 case ORDERED_EXPR:
8919 case UNLT_EXPR:
8920 case UNLE_EXPR:
8921 case UNGT_EXPR:
8922 case UNGE_EXPR:
8923 case UNEQ_EXPR:
8924 case LTGT_EXPR:
f9a00e9e 8925 {
8926 temp = do_store_flag (ops,
8927 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8928 tmode != VOIDmode ? tmode : mode);
8929 if (temp)
8930 return temp;
8931
8932 /* Use a compare and a jump for BLKmode comparisons, or for function
 8933	   type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8934
8935 if ((target == 0
8936 || modifier == EXPAND_STACK_PARM
8937 || ! safe_from_p (target, treeop0, 1)
8938 || ! safe_from_p (target, treeop1, 1)
8939 /* Make sure we don't have a hard reg (such as function's return
8940 value) live across basic blocks, if not optimizing. */
8941 || (!optimize && REG_P (target)
8942 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8943 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8944
8945 emit_move_insn (target, const0_rtx);
8946
8947 rtx_code_label *lab1 = gen_label_rtx ();
8948 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8949
8950 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8951 emit_move_insn (target, constm1_rtx);
8952 else
8953 emit_move_insn (target, const1_rtx);
10f307d9 8954
f9a00e9e 8955 emit_label (lab1);
8956 return target;
8957 }
dff12ad7 8958 case COMPLEX_EXPR:
8959 /* Get the rtx code of the operands. */
8960 op0 = expand_normal (treeop0);
8961 op1 = expand_normal (treeop1);
f8ca8b77 8962
dff12ad7 8963 if (!target)
8964 target = gen_reg_rtx (TYPE_MODE (type));
83014b20 8965 else
8966 /* If target overlaps with op1, then either we need to force
8967 op1 into a pseudo (if target also overlaps with op0),
8968 or write the complex parts in reverse order. */
8969 switch (GET_CODE (target))
8970 {
8971 case CONCAT:
8972 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8973 {
8974 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8975 {
8976 complex_expr_force_op1:
8977 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8978 emit_move_insn (temp, op1);
8979 op1 = temp;
8980 break;
8981 }
8982 complex_expr_swap_order:
8983 /* Move the imaginary (op1) and real (op0) parts to their
8984 location. */
8985 write_complex_part (target, op1, true);
8986 write_complex_part (target, op0, false);
8987
8988 return target;
8989 }
8990 break;
8991 case MEM:
8992 temp = adjust_address_nv (target,
8993 GET_MODE_INNER (GET_MODE (target)), 0);
8994 if (reg_overlap_mentioned_p (temp, op1))
8995 {
3754d046 8996 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
83014b20 8997 temp = adjust_address_nv (target, imode,
8998 GET_MODE_SIZE (imode));
8999 if (reg_overlap_mentioned_p (temp, op0))
9000 goto complex_expr_force_op1;
9001 goto complex_expr_swap_order;
9002 }
9003 break;
9004 default:
9005 if (reg_overlap_mentioned_p (target, op1))
9006 {
9007 if (reg_overlap_mentioned_p (target, op0))
9008 goto complex_expr_force_op1;
9009 goto complex_expr_swap_order;
9010 }
9011 break;
9012 }
b5ba9f3a 9013
dff12ad7 9014 /* Move the real (op0) and imaginary (op1) parts to their location. */
9015 write_complex_part (target, op0, false);
9016 write_complex_part (target, op1, true);
c3a9c149 9017
dff12ad7 9018 return target;
6e6b4174 9019
dff12ad7 9020 case WIDEN_SUM_EXPR:
9021 {
9022 tree oprnd0 = treeop0;
9023 tree oprnd1 = treeop1;
1c9f9aa6 9024
dff12ad7 9025 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9026 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9027 target, unsignedp);
9028 return target;
10f307d9 9029 }
9030
dff12ad7 9031 case REDUC_MAX_EXPR:
9032 case REDUC_MIN_EXPR:
9033 case REDUC_PLUS_EXPR:
10fc867f 9034 {
dff12ad7 9035 op0 = expand_normal (treeop0);
9036 this_optab = optab_for_tree_code (code, type, optab_default);
3754d046 9037 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
f3d76545 9038
9039 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9040 {
9041 struct expand_operand ops[2];
9042 enum insn_code icode = optab_handler (this_optab, vec_mode);
9043
9044 create_output_operand (&ops[0], target, mode);
9045 create_input_operand (&ops[1], op0, vec_mode);
9046 if (maybe_expand_insn (icode, 2, ops))
9047 {
9048 target = ops[0].value;
9049 if (GET_MODE (target) != mode)
9050 return gen_lowpart (tmode, target);
9051 return target;
9052 }
9053 }
9054 /* Fall back to optab with vector result, and then extract scalar. */
9055 this_optab = scalar_reduc_to_vector (this_optab, type);
7ba68b18 9056 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9057 gcc_assert (temp);
9058 /* The tree code produces a scalar result, but (somewhat by convention)
9059 the optab produces a vector with the result in element 0 if
9060 little-endian, or element N-1 if big-endian. So pull the scalar
9061 result out of that element. */
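      /* E.g. (illustrative) for a V4SImode operand the scalar is in
	 element 0 (bit offset 0) on a little-endian target and in
	 element 3 on a big-endian one.  */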
9062 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9063 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9064 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9065 target, mode, mode);
dff12ad7 9066 gcc_assert (temp);
9067 return temp;
9068 }
10fc867f 9069
dff12ad7 9070 case VEC_UNPACK_HI_EXPR:
9071 case VEC_UNPACK_LO_EXPR:
9072 {
9073 op0 = expand_normal (treeop0);
dff12ad7 9074 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9075 target, unsignedp);
9076 gcc_assert (temp);
9077 return temp;
9078 }
a9d9ab08 9079
dff12ad7 9080 case VEC_UNPACK_FLOAT_HI_EXPR:
9081 case VEC_UNPACK_FLOAT_LO_EXPR:
9082 {
9083 op0 = expand_normal (treeop0);
9084 /* The signedness is determined from input operand. */
dff12ad7 9085 temp = expand_widen_pattern_expr
9086 (ops, op0, NULL_RTX, NULL_RTX,
9087 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
155b05dc 9088
dff12ad7 9089 gcc_assert (temp);
9090 return temp;
9091 }
d2ae1b1e 9092
dff12ad7 9093 case VEC_WIDEN_MULT_HI_EXPR:
9094 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 9095 case VEC_WIDEN_MULT_EVEN_EXPR:
9096 case VEC_WIDEN_MULT_ODD_EXPR:
6083c152 9097 case VEC_WIDEN_LSHIFT_HI_EXPR:
9098 case VEC_WIDEN_LSHIFT_LO_EXPR:
79a78f7f 9099 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9100 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9101 target, unsignedp);
9102 gcc_assert (target);
9103 return target;
6083c152 9104
dff12ad7 9105 case VEC_PACK_TRUNC_EXPR:
9106 case VEC_PACK_SAT_EXPR:
9107 case VEC_PACK_FIX_TRUNC_EXPR:
9108 mode = TYPE_MODE (TREE_TYPE (treeop0));
9109 goto binop;
f4803722 9110
9111 case VEC_PERM_EXPR:
e21c468f 9112 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9113 op2 = expand_normal (treeop2);
d9198d85 9114
9115 /* Careful here: if the target doesn't support integral vector modes,
9116 a constant selection vector could wind up smooshed into a normal
9117 integral constant. */
9118 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9119 {
9120 tree sel_type = TREE_TYPE (treeop2);
3754d046 9121 machine_mode vmode
d9198d85 9122 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9123 TYPE_VECTOR_SUBPARTS (sel_type));
9124 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9125 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9126 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9127 }
9128 else
9129 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9130
e21c468f 9131 temp = expand_vec_perm (mode, op0, op1, op2, target);
9132 gcc_assert (temp);
9133 return temp;
10f307d9 9134
c86930b0 9135 case DOT_PROD_EXPR:
9136 {
9137 tree oprnd0 = treeop0;
9138 tree oprnd1 = treeop1;
9139 tree oprnd2 = treeop2;
9140 rtx op2;
9141
9142 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9143 op2 = expand_normal (oprnd2);
9144 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9145 target, unsignedp);
9146 return target;
9147 }
9148
a2287001 9149 case SAD_EXPR:
9150 {
9151 tree oprnd0 = treeop0;
9152 tree oprnd1 = treeop1;
9153 tree oprnd2 = treeop2;
9154 rtx op2;
9155
9156 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9157 op2 = expand_normal (oprnd2);
9158 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9159 target, unsignedp);
9160 return target;
9161 }
9162
c86930b0 9163 case REALIGN_LOAD_EXPR:
9164 {
9165 tree oprnd0 = treeop0;
9166 tree oprnd1 = treeop1;
9167 tree oprnd2 = treeop2;
9168 rtx op2;
9169
9170 this_optab = optab_for_tree_code (code, type, optab_default);
9171 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9172 op2 = expand_normal (oprnd2);
9173 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9174 target, unsignedp);
9175 gcc_assert (temp);
9176 return temp;
9177 }
9178
8a2caf10 9179 case COND_EXPR:
f9a00e9e 9180 {
9181 /* A COND_EXPR with its type being VOID_TYPE represents a
9182 conditional jump and is handled in
9183 expand_gimple_cond_expr. */
9184 gcc_assert (!VOID_TYPE_P (type));
9185
9186 /* Note that COND_EXPRs whose type is a structure or union
9187 are required to be constructed to contain assignments of
9188 a temporary variable, so that we can evaluate them here
9189 for side effect only. If type is void, we must do likewise. */
9190
9191 gcc_assert (!TREE_ADDRESSABLE (type)
9192 && !ignore
9193 && TREE_TYPE (treeop1) != void_type_node
9194 && TREE_TYPE (treeop2) != void_type_node);
9195
9196 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9197 if (temp)
9198 return temp;
9199
9200 /* If we are not to produce a result, we have no target. Otherwise,
9201 if a target was specified use it; it will not be used as an
9202 intermediate target unless it is safe. If no target, use a
9203 temporary. */
9204
9205 if (modifier != EXPAND_STACK_PARM
9206 && original_target
9207 && safe_from_p (original_target, treeop0, 1)
9208 && GET_MODE (original_target) == mode
9209 && !MEM_P (original_target))
9210 temp = original_target;
9211 else
9212 temp = assign_temp (type, 0, 1);
9213
9214 do_pending_stack_adjust ();
9215 NO_DEFER_POP;
9216 rtx_code_label *lab0 = gen_label_rtx ();
9217 rtx_code_label *lab1 = gen_label_rtx ();
9218 jumpifnot (treeop0, lab0, -1);
9219 store_expr (treeop1, temp,
9220 modifier == EXPAND_STACK_PARM,
9221 false);
9222
9223 emit_jump_insn (gen_jump (lab1));
9224 emit_barrier ();
9225 emit_label (lab0);
9226 store_expr (treeop2, temp,
9227 modifier == EXPAND_STACK_PARM,
9228 false);
9229
9230 emit_label (lab1);
9231 OK_DEFER_POP;
c909ed33 9232 return temp;
f9a00e9e 9233 }
8a2caf10 9234
9235 case VEC_COND_EXPR:
9236 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9237 return target;
9238
dff12ad7 9239 default:
9240 gcc_unreachable ();
9241 }
d2ae1b1e 9242
dff12ad7 9243 /* Here to do an ordinary binary operator. */
9244 binop:
9245 expand_operands (treeop0, treeop1,
9246 subtarget, &op0, &op1, EXPAND_NORMAL);
9247 binop2:
9248 this_optab = optab_for_tree_code (code, type, optab_default);
9249 binop3:
9250 if (modifier == EXPAND_STACK_PARM)
9251 target = 0;
9252 temp = expand_binop (mode, this_optab, op0, op1, target,
1701f789 9253 unsignedp, OPTAB_LIB_WIDEN);
dff12ad7 9254 gcc_assert (temp);
8618a9e7 9255 /* Bitwise operations do not need bitfield reduction as we expect their
9256 operands being properly truncated. */
9257 if (code == BIT_XOR_EXPR
9258 || code == BIT_AND_EXPR
9259 || code == BIT_IOR_EXPR)
9260 return temp;
dff12ad7 9261 return REDUCE_BIT_FIELD (temp);
9262}
9263#undef REDUCE_BIT_FIELD
d324678b 9264
f7373a91 9265
9266/* Return TRUE if expression STMT is suitable for replacement.
9267 Never consider memory loads as replaceable, because those don't ever lead
9268 into constant expressions. */
9269
9270static bool
9271stmt_is_replaceable_p (gimple stmt)
9272{
9273 if (ssa_is_replaceable_p (stmt))
9274 {
9275 /* Don't move around loads. */
9276 if (!gimple_assign_single_p (stmt)
9277 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9278 return true;
9279 }
9280 return false;
9281}
9282
16c9337c 9283rtx
3754d046 9284expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
a12f023f 9285 enum expand_modifier modifier, rtx *alt_rtl,
9286 bool inner_reference_p)
dff12ad7 9287{
9288 rtx op0, op1, temp, decl_rtl;
9289 tree type;
9290 int unsignedp;
1382992b 9291 machine_mode mode;
dff12ad7 9292 enum tree_code code = TREE_CODE (exp);
dff12ad7 9293 rtx subtarget, original_target;
9294 int ignore;
9295 tree context;
9296 bool reduce_bit_field;
9297 location_t loc = EXPR_LOCATION (exp);
9298 struct separate_ops ops;
9299 tree treeop0, treeop1, treeop2;
ae929441 9300 tree ssa_name = NULL_TREE;
9301 gimple g;
d324678b 9302
dff12ad7 9303 type = TREE_TYPE (exp);
9304 mode = TYPE_MODE (type);
9305 unsignedp = TYPE_UNSIGNED (type);
d324678b 9306
dff12ad7 9307 treeop0 = treeop1 = treeop2 = NULL_TREE;
9308 if (!VL_EXP_CLASS_P (exp))
9309 switch (TREE_CODE_LENGTH (code))
9310 {
9311 default:
9312 case 3: treeop2 = TREE_OPERAND (exp, 2);
9313 case 2: treeop1 = TREE_OPERAND (exp, 1);
9314 case 1: treeop0 = TREE_OPERAND (exp, 0);
9315 case 0: break;
9316 }
9317 ops.code = code;
9318 ops.type = type;
9319 ops.op0 = treeop0;
9320 ops.op1 = treeop1;
9321 ops.op2 = treeop2;
9322 ops.location = loc;
dda75192 9323
dff12ad7 9324 ignore = (target == const0_rtx
9325 || ((CONVERT_EXPR_CODE_P (code)
9326 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9327 && TREE_CODE (type) == VOID_TYPE));
5b1bb114 9328
dff12ad7 9329 /* An operation in what may be a bit-field type needs the
9330 result to be reduced to the precision of the bit-field type,
9331 which is narrower than that of the type's mode. */
9332 reduce_bit_field = (!ignore
c3c9a9f3 9333 && INTEGRAL_TYPE_P (type)
dff12ad7 9334 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
dda75192 9335
dff12ad7 9336 /* If we are going to ignore this result, we need only do something
9337 if there is a side-effect somewhere in the expression. If there
9338 is, short-circuit the most common cases here. Note that we must
9339 not call expand_expr with anything but const0_rtx in case this
9340 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
d2ae1b1e 9341
dff12ad7 9342 if (ignore)
9343 {
9344 if (! TREE_SIDE_EFFECTS (exp))
9345 return const0_rtx;
9346
9347 /* Ensure we reference a volatile object even if value is ignored, but
9348 don't do this if all we are doing is taking its address. */
9349 if (TREE_THIS_VOLATILE (exp)
9350 && TREE_CODE (exp) != FUNCTION_DECL
9351 && mode != VOIDmode && mode != BLKmode
9352 && modifier != EXPAND_CONST_ADDRESS)
78aee3e4 9353 {
dff12ad7 9354 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9355 if (MEM_P (temp))
1084097d 9356 copy_to_reg (temp);
dff12ad7 9357 return const0_rtx;
78aee3e4 9358 }
9359
dff12ad7 9360 if (TREE_CODE_CLASS (code) == tcc_unary
2330f9c5 9361 || code == BIT_FIELD_REF
9362 || code == COMPONENT_REF
9363 || code == INDIRECT_REF)
dff12ad7 9364 return expand_expr (treeop0, const0_rtx, VOIDmode,
9365 modifier);
f9b618cc 9366
dff12ad7 9367 else if (TREE_CODE_CLASS (code) == tcc_binary
9368 || TREE_CODE_CLASS (code) == tcc_comparison
9369 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9370 {
9371 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9372 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9373 return const0_rtx;
9374 }
f9b618cc 9375
dff12ad7 9376 target = 0;
9377 }
f9b618cc 9378
dff12ad7 9379 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9380 target = 0;
f9b618cc 9381
dff12ad7 9382 /* Use subtarget as the target for operand 0 of a binary operation. */
9383 subtarget = get_subtarget (target);
9384 original_target = target;
f9b618cc 9385
dff12ad7 9386 switch (code)
9387 {
9388 case LABEL_DECL:
9389 {
9390 tree function = decl_function_context (exp);
f9b618cc 9391
dff12ad7 9392 temp = label_rtx (exp);
9393 temp = gen_rtx_LABEL_REF (Pmode, temp);
f9b618cc 9394
dff12ad7 9395 if (function != current_function_decl
9396 && function != 0)
9397 LABEL_REF_NONLOCAL_P (temp) = 1;
9398
9399 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9400 return temp;
f9b618cc 9401 }
9402
dff12ad7 9403 case SSA_NAME:
9404 /* ??? ivopts calls expander, without any preparation from
9405 out-of-ssa. So fake instructions as if this was an access to the
9406 base variable. This unnecessarily allocates a pseudo, see how we can
9407 reuse it, if partition base vars have it set already. */
9408 if (!currently_expanding_to_rtl)
ec11736b 9409 {
9410 tree var = SSA_NAME_VAR (exp);
9411 if (var && DECL_RTL_SET_P (var))
9412 return DECL_RTL (var);
9413 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9414 LAST_VIRTUAL_REGISTER + 1);
9415 }
ae929441 9416
9417 g = get_gimple_for_ssa_name (exp);
351b6ccf 9418 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9419 if (g == NULL
9420 && modifier == EXPAND_INITIALIZER
9421 && !SSA_NAME_IS_DEFAULT_DEF (exp)
1382992b 9422 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
351b6ccf 9423 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9424 g = SSA_NAME_DEF_STMT (exp);
ae929441 9425 if (g)
0a98b6d9 9426 {
ed4d69dc 9427 rtx r;
903906b6 9428 ops.code = gimple_assign_rhs_code (g);
9429 switch (get_gimple_rhs_class (ops.code))
9430 {
9431 case GIMPLE_TERNARY_RHS:
9432 ops.op2 = gimple_assign_rhs3 (g);
9433 /* Fallthru */
9434 case GIMPLE_BINARY_RHS:
9435 ops.op1 = gimple_assign_rhs2 (g);
9336ad57 9436
 9437	      /* Try to expand a conditional compare.  */
9438 if (targetm.gen_ccmp_first)
9439 {
9440 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9441 r = expand_ccmp_expr (g);
9442 if (r)
9443 break;
9444 }
903906b6 9445 /* Fallthru */
9446 case GIMPLE_UNARY_RHS:
9447 ops.op0 = gimple_assign_rhs1 (g);
9448 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9449 ops.location = gimple_location (g);
9450 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9451 break;
9452 case GIMPLE_SINGLE_RHS:
9453 {
9454 location_t saved_loc = curr_insn_location ();
9455 set_curr_insn_location (gimple_location (g));
9456 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9457 tmode, modifier, NULL, inner_reference_p);
9458 set_curr_insn_location (saved_loc);
9459 break;
9460 }
9461 default:
9462 gcc_unreachable ();
9463 }
0a98b6d9 9464 if (REG_P (r) && !REG_EXPR (r))
9465 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9466 return r;
9467 }
ae929441 9468
9469 ssa_name = exp;
9470 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9471 exp = SSA_NAME_VAR (ssa_name);
dff12ad7 9472 goto expand_decl_rtl;
f96c43fb 9473
dff12ad7 9474 case PARM_DECL:
9475 case VAR_DECL:
9476 /* If a static var's type was incomplete when the decl was written,
9477 but the type is complete now, lay out the decl now. */
9478 if (DECL_SIZE (exp) == 0
9479 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9480 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9481 layout_decl (exp, 0);
9482
dff12ad7 9483 /* ... fall through ... */
f96c43fb 9484
dff12ad7 9485 case FUNCTION_DECL:
9486 case RESULT_DECL:
9487 decl_rtl = DECL_RTL (exp);
9488 expand_decl_rtl:
9489 gcc_assert (decl_rtl);
9490 decl_rtl = copy_rtx (decl_rtl);
47a55be7 9491 /* Record writes to register variables. */
d82cf2b2 9492 if (modifier == EXPAND_WRITE
9493 && REG_P (decl_rtl)
9494 && HARD_REGISTER_P (decl_rtl))
9495 add_to_hard_reg_set (&crtl->asm_clobbers,
9496 GET_MODE (decl_rtl), REGNO (decl_rtl));
f96c43fb 9497
dff12ad7 9498      /* Ensure the variable is marked as used even if it doesn't go through
 9499	 a parser.  If it hasn't been used yet, write out an external
9500 definition. */
1382992b 9501 TREE_USED (exp) = 1;
f96c43fb 9502
dff12ad7 9503 /* Show we haven't gotten RTL for this yet. */
9504 temp = 0;
f96c43fb 9505
dff12ad7 9506 /* Variables inherited from containing functions should have
9507 been lowered by this point. */
1382992b 9508 context = decl_function_context (exp);
9509 gcc_assert (SCOPE_FILE_SCOPE_P (context)
dff12ad7 9510 || context == current_function_decl
9511 || TREE_STATIC (exp)
ca601f81 9512 || DECL_EXTERNAL (exp)
dff12ad7 9513 /* ??? C++ creates functions that are not TREE_STATIC. */
9514 || TREE_CODE (exp) == FUNCTION_DECL);
f96c43fb 9515
dff12ad7 9516 /* This is the case of an array whose size is to be determined
9517 from its initializer, while the initializer is still being parsed.
0ab48139 9518 ??? We aren't parsing while expanding anymore. */
e58d0f17 9519
dff12ad7 9520 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9521 temp = validize_mem (decl_rtl);
f96c43fb 9522
dff12ad7 9523	      /* If DECL_RTL is memory, we are in the normal case; if the
 9524	 address is not valid, get the address into a register.  */
ff385626 9525
dff12ad7 9526 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9527 {
9528 if (alt_rtl)
9529 *alt_rtl = decl_rtl;
9530 decl_rtl = use_anchored_address (decl_rtl);
9531 if (modifier != EXPAND_CONST_ADDRESS
9532 && modifier != EXPAND_SUM
1382992b 9533 && !memory_address_addr_space_p (DECL_MODE (exp),
bd1a81f7 9534 XEXP (decl_rtl, 0),
9535 MEM_ADDR_SPACE (decl_rtl)))
dff12ad7 9536 temp = replace_equiv_address (decl_rtl,
9537 copy_rtx (XEXP (decl_rtl, 0)));
f96c43fb 9538 }
9539
dff12ad7 9540 /* If we got something, return it. But first, set the alignment
9541 if the address is a register. */
9542 if (temp != 0)
9543 {
1382992b 9544 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
dff12ad7 9545 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
f96c43fb 9546
dff12ad7 9547 return temp;
9548 }
0de36bdb 9549
ee5ab2d1 9550 /* If the mode of DECL_RTL does not match that of the decl,
9551 there are two cases: we are dealing with a BLKmode value
9552 that is returned in a register, or we are dealing with
9553 a promoted value. In the latter case, return a SUBREG
9554 of the wanted mode, but mark it so that we know that it
9555 was already extended. */
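      /* For instance, a HImode variable promoted according to the target's
	 PROMOTE_MODE rules may live in an SImode register; in that case the
	 result is (subreg:HI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P set.  */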
9556 if (REG_P (decl_rtl)
1382992b 9557 && DECL_MODE (exp) != BLKmode
9558 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7e564f73 9559 {
3754d046 9560 machine_mode pmode;
7e564f73 9561
ae929441 9562 /* Get the signedness to be used for this variable. Ensure we get
9563 the same mode we got when the variable was declared. */
1382992b 9564 if (code == SSA_NAME
9565 && (g = SSA_NAME_DEF_STMT (ssa_name))
9566 && gimple_code (g) == GIMPLE_CALL
9567 && !gimple_call_internal_p (g))
4acb747c 9568 pmode = promote_function_mode (type, mode, &unsignedp,
9569 gimple_call_fntype (g),
9570 2);
ae929441 9571 else
1382992b 9572 pmode = promote_decl_mode (exp, &unsignedp);
dff12ad7 9573 gcc_assert (GET_MODE (decl_rtl) == pmode);
9574
9575 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9576 SUBREG_PROMOTED_VAR_P (temp) = 1;
5a9ccd1b 9577 SUBREG_PROMOTED_SET (temp, unsignedp);
dff12ad7 9578 return temp;
7e564f73 9579 }
9580
dff12ad7 9581 return decl_rtl;
10f307d9 9582
dff12ad7 9583 case INTEGER_CST:
90079d10 9584 /* Given that TYPE_PRECISION (type) is not always equal to
c4050ce7 9585 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9586 the former to the latter according to the signedness of the
90079d10 9587 type. */
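      /* For instance, the value -1 of a 3-bit signed bit-field type whose
	 TYPE_MODE is QImode must be sign-extended from 3 bits to the full
	 8-bit precision of QImode before being emitted as an rtx constant.  */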
c4050ce7 9588 temp = immed_wide_int_const (wide_int::from
9589 (exp,
9590 GET_MODE_PRECISION (TYPE_MODE (type)),
9591 TYPE_SIGN (type)),
9592 TYPE_MODE (type));
9593 return temp;
9594
dff12ad7 9595 case VECTOR_CST:
9596 {
9597 tree tmp = NULL_TREE;
9598 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9599 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9600 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9601 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9602 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9603 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9604 return const_vector_from_tree (exp);
9605 if (GET_MODE_CLASS (mode) == MODE_INT)
9606 {
9607 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9608 if (type_for_mode)
9609 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9610 }
9611 if (!tmp)
fadf62f4 9612 {
f1f41a6c 9613 vec<constructor_elt, va_gc> *v;
fadf62f4 9614 unsigned i;
f1f41a6c 9615 vec_alloc (v, VECTOR_CST_NELTS (exp));
fadf62f4 9616 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9617 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9618 tmp = build_constructor (type, v);
9619 }
dff12ad7 9620 return expand_expr (tmp, ignore ? const0_rtx : target,
9621 tmode, modifier);
9622 }
10f307d9 9623
dff12ad7 9624 case CONST_DECL:
9625 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10f307d9 9626
dff12ad7 9627 case REAL_CST:
9628 /* If optimized, generate immediate CONST_DOUBLE
9629 which will be turned into memory by reload if necessary.
2c551bbe 9630
dff12ad7 9631 We used to force a register so that loop.c could see it. But
9632 this does not allow gen_* patterns to perform optimizations with
9633 the constants. It also produces two insns in cases like "x = 1.0;".
9634 On most machines, floating-point constants are not permitted in
9635 many insns, so we'd end up copying it to a register in any case.
10f307d9 9636
dff12ad7 9637 Now, we do the copying in expand_binop, if appropriate. */
9638 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9639 TYPE_MODE (TREE_TYPE (exp)));
2c551bbe 9640
dff12ad7 9641 case FIXED_CST:
9642 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9643 TYPE_MODE (TREE_TYPE (exp)));
10f307d9 9644
dff12ad7 9645 case COMPLEX_CST:
9646 /* Handle evaluating a complex constant in a CONCAT target. */
9647 if (original_target && GET_CODE (original_target) == CONCAT)
a60e4107 9648 {
3754d046 9649 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
dff12ad7 9650 rtx rtarg, itarg;
9651
9652 rtarg = XEXP (original_target, 0);
9653 itarg = XEXP (original_target, 1);
9654
9655 /* Move the real and imaginary parts separately. */
9656 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9657 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9658
9659 if (op0 != rtarg)
9660 emit_move_insn (rtarg, op0);
9661 if (op1 != itarg)
9662 emit_move_insn (itarg, op1);
9663
9664 return original_target;
a60e4107 9665 }
10f307d9 9666
dff12ad7 9667 /* ... fall through ... */
10f307d9 9668
dff12ad7 9669 case STRING_CST:
9670 temp = expand_expr_constant (exp, 1, modifier);
43fda261 9671
dff12ad7 9672 /* temp contains a constant address.
9673 On RISC machines where a constant address isn't valid,
9674 make some insns to get that address into a register. */
9675 if (modifier != EXPAND_CONST_ADDRESS
9676 && modifier != EXPAND_INITIALIZER
9677 && modifier != EXPAND_SUM
bd1a81f7 9678 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9679 MEM_ADDR_SPACE (temp)))
dff12ad7 9680 return replace_equiv_address (temp,
9681 copy_rtx (XEXP (temp, 0)));
9682 return temp;
43fda261 9683
dff12ad7 9684 case SAVE_EXPR:
9685 {
9686 tree val = treeop0;
a12f023f 9687 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9688 inner_reference_p);
94d01330 9689
dff12ad7 9690 if (!SAVE_EXPR_RESOLVED_P (exp))
9691 {
9692 /* We can indeed still hit this case, typically via builtin
9693 expanders calling save_expr immediately before expanding
9694 something. Assume this means that we only have to deal
9695 with non-BLKmode values. */
9696 gcc_assert (GET_MODE (ret) != BLKmode);
a02b3586 9697
ed4d69dc 9698 val = build_decl (curr_insn_location (),
dff12ad7 9699 VAR_DECL, NULL, TREE_TYPE (exp));
9700 DECL_ARTIFICIAL (val) = 1;
9701 DECL_IGNORED_P (val) = 1;
9702 treeop0 = val;
9703 TREE_OPERAND (exp, 0) = treeop0;
9704 SAVE_EXPR_RESOLVED_P (exp) = 1;
acd367d1 9705
dff12ad7 9706 if (!CONSTANT_P (ret))
9707 ret = copy_to_reg (ret);
9708 SET_DECL_RTL (val, ret);
9709 }
acd367d1 9710
dff12ad7 9711 return ret;
9712 }
9713
dff12ad7 9714
9715 case CONSTRUCTOR:
9716 /* If we don't need the result, just ensure we evaluate any
9717 subexpressions. */
9718 if (ignore)
acd367d1 9719 {
dff12ad7 9720 unsigned HOST_WIDE_INT idx;
9721 tree value;
9722
9723 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9724 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9725
9726 return const0_rtx;
acd367d1 9727 }
9728
dff12ad7 9729 return expand_constructor (exp, target, modifier, false);
10f307d9 9730
5d9de213 9731 case TARGET_MEM_REF:
dff12ad7 9732 {
1211c450 9733 addr_space_t as
9734 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
36eabf84 9735 enum insn_code icode;
56cf6489 9736 unsigned int align;
bd1a81f7 9737
64641360 9738 op0 = addr_for_mem_ref (exp, as, true);
bd1a81f7 9739 op0 = memory_address_addr_space (mode, op0, as);
dff12ad7 9740 temp = gen_rtx_MEM (mode, op0);
dff12ad7 9741 set_mem_attributes (temp, exp, 0);
bd1a81f7 9742 set_mem_addr_space (temp, as);
3482bf13 9743 align = get_object_alignment (exp);
884b03c9 9744 if (modifier != EXPAND_WRITE
28a9e8c4 9745 && modifier != EXPAND_MEMORY
884b03c9 9746 && mode != BLKmode
56cf6489 9747 && align < GET_MODE_ALIGNMENT (mode)
5d9de213 9748 /* If the target does not have special handling for unaligned
 9749		 loads of this mode, it can use regular moves for them.  */
9750 && ((icode = optab_handler (movmisalign_optab, mode))
9751 != CODE_FOR_nothing))
dff12ad7 9752 {
36eabf84 9753 struct expand_operand ops[2];
a35a63ff 9754
dff12ad7 9755 /* We've already validated the memory, and we're creating a
36eabf84 9756 new pseudo destination. The predicates really can't fail,
9757 nor can the generator. */
9758 create_output_operand (&ops[0], NULL_RTX, mode);
9759 create_fixed_operand (&ops[1], temp);
9760 expand_insn (icode, 2, ops);
e9b15297 9761 temp = ops[0].value;
dff12ad7 9762 }
dff12ad7 9763 return temp;
9764 }
9765
182cf5a9 9766 case MEM_REF:
9767 {
9768 addr_space_t as
1211c450 9769 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
3754d046 9770 machine_mode address_mode;
182cf5a9 9771 tree base = TREE_OPERAND (exp, 0);
86638c2e 9772 gimple def_stmt;
36eabf84 9773 enum insn_code icode;
56cf6489 9774 unsigned align;
182cf5a9 9775 /* Handle expansion of non-aliased memory with non-BLKmode. That
9776 might end up in a register. */
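	/* This covers references such as MEM[(T *)&decl, off] where DECL is
	   not addressable and may therefore have been assigned a register
	   rather than a stack slot.  */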
a598af2a 9777 if (mem_ref_refers_to_non_mem_p (exp))
182cf5a9 9778 {
e913b5cd 9779 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
182cf5a9 9780 base = TREE_OPERAND (base, 0);
a598af2a 9781 if (offset == 0
e913b5cd 9782 && tree_fits_uhwi_p (TYPE_SIZE (type))
a598af2a 9783 && (GET_MODE_BITSIZE (DECL_MODE (base))
e913b5cd 9784 == tree_to_uhwi (TYPE_SIZE (type))))
e9b15297 9785 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
a598af2a 9786 target, tmode, modifier);
e9b15297 9787 if (TYPE_MODE (type) == BLKmode)
182cf5a9 9788 {
a598af2a 9789 temp = assign_stack_temp (DECL_MODE (base),
0ab48139 9790 GET_MODE_SIZE (DECL_MODE (base)));
a598af2a 9791 store_expr (base, temp, 0, false);
9792 temp = adjust_address (temp, BLKmode, offset);
e9b15297 9793 set_mem_size (temp, int_size_in_bytes (type));
a598af2a 9794 return temp;
182cf5a9 9795 }
e9b15297 9796 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9797 bitsize_int (offset * BITS_PER_UNIT));
9798 return expand_expr (exp, target, tmode, modifier);
182cf5a9 9799 }
9800 address_mode = targetm.addr_space.address_mode (as);
86638c2e 9801 base = TREE_OPERAND (exp, 0);
9802 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
5d9de213 9803 {
9804 tree mask = gimple_assign_rhs2 (def_stmt);
9805 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9806 gimple_assign_rhs1 (def_stmt), mask);
9807 TREE_OPERAND (exp, 0) = base;
9808 }
3482bf13 9809 align = get_object_alignment (exp);
84f7af3e 9810 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
d244cab5 9811 op0 = memory_address_addr_space (mode, op0, as);
182cf5a9 9812 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6b0e9e4b 9813 {
e913b5cd 9814 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
6b0e9e4b 9815 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
628e6f66 9816 op0 = memory_address_addr_space (mode, op0, as);
6b0e9e4b 9817 }
182cf5a9 9818 temp = gen_rtx_MEM (mode, op0);
9819 set_mem_attributes (temp, exp, 0);
9820 set_mem_addr_space (temp, as);
9821 if (TREE_THIS_VOLATILE (exp))
9822 MEM_VOLATILE_P (temp) = 1;
884b03c9 9823 if (modifier != EXPAND_WRITE
28a9e8c4 9824 && modifier != EXPAND_MEMORY
a12f023f 9825 && !inner_reference_p
884b03c9 9826 && mode != BLKmode
e2c56221 9827 && align < GET_MODE_ALIGNMENT (mode))
5d9de213 9828 {
e2c56221 9829 if ((icode = optab_handler (movmisalign_optab, mode))
9830 != CODE_FOR_nothing)
9831 {
9832 struct expand_operand ops[2];
9833
9834 /* We've already validated the memory, and we're creating a
9835 new pseudo destination. The predicates really can't fail,
9836 nor can the generator. */
9837 create_output_operand (&ops[0], NULL_RTX, mode);
9838 create_fixed_operand (&ops[1], temp);
9839 expand_insn (icode, 2, ops);
e9b15297 9840 temp = ops[0].value;
e2c56221 9841 }
9842 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9843 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9844 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
3f71db40 9845 (modifier == EXPAND_STACK_PARM
9846 ? NULL_RTX : target),
e2c56221 9847 mode, mode);
5d9de213 9848 }
182cf5a9 9849 return temp;
9850 }
9851
dff12ad7 9852 case ARRAY_REF:
9853
9854 {
9855 tree array = treeop0;
9856 tree index = treeop1;
f42d23fc 9857 tree init;
dff12ad7 9858
9859 /* Fold an expression like: "foo"[2].
9860 This is not done in fold so it won't happen inside &.
9861 Don't fold if this is for wide characters since it's too
9862 difficult to do correctly and this is a very rare case. */
9863
9864 if (modifier != EXPAND_CONST_ADDRESS
9865 && modifier != EXPAND_INITIALIZER
9866 && modifier != EXPAND_MEMORY)
9867 {
9868 tree t = fold_read_from_constant_string (exp);
9869
9870 if (t)
9871 return expand_expr (t, target, tmode, modifier);
9872 }
9873
9874 /* If this is a constant index into a constant array,
9875 just get the value from the array. Handle both the cases when
9876 we have an explicit constructor and when our operand is a variable
9877 that was declared const. */
9878
9879 if (modifier != EXPAND_CONST_ADDRESS
9880 && modifier != EXPAND_INITIALIZER
9881 && modifier != EXPAND_MEMORY
9882 && TREE_CODE (array) == CONSTRUCTOR
9883 && ! TREE_SIDE_EFFECTS (array)
9884 && TREE_CODE (index) == INTEGER_CST)
9885 {
9886 unsigned HOST_WIDE_INT ix;
9887 tree field, value;
9888
9889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9890 field, value)
9891 if (tree_int_cst_equal (field, index))
10b58489 9892 {
dff12ad7 9893 if (!TREE_SIDE_EFFECTS (value))
9894 return expand_expr (fold (value), target, tmode, modifier);
9895 break;
10b58489 9896 }
dff12ad7 9897 }
10f307d9 9898
dff12ad7 9899 else if (optimize >= 1
9900 && modifier != EXPAND_CONST_ADDRESS
9901 && modifier != EXPAND_INITIALIZER
9902 && modifier != EXPAND_MEMORY
9903 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
df8d3e89 9904 && TREE_CODE (index) == INTEGER_CST
9905 && (TREE_CODE (array) == VAR_DECL
9906 || TREE_CODE (array) == CONST_DECL)
9907 && (init = ctor_for_folding (array)) != error_mark_node)
dff12ad7 9908 {
e8e60ada 9909 if (init == NULL_TREE)
9910 {
9911 tree value = build_zero_cst (type);
9912 if (TREE_CODE (value) == CONSTRUCTOR)
9913 {
9914 /* If VALUE is a CONSTRUCTOR, this optimization is only
9915 useful if this doesn't store the CONSTRUCTOR into
9916 memory. If it does, it is more efficient to just
9917 load the data from the array directly. */
9918 rtx ret = expand_constructor (value, target,
9919 modifier, true);
9920 if (ret == NULL_RTX)
9921 value = NULL_TREE;
9922 }
9923
9924 if (value)
9925 return expand_expr (value, target, tmode, modifier);
9926 }
9927 else if (TREE_CODE (init) == CONSTRUCTOR)
dff12ad7 9928 {
df8d3e89 9929 unsigned HOST_WIDE_INT ix;
9930 tree field, value;
68a556d6 9931
df8d3e89 9932 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9933 field, value)
9934 if (tree_int_cst_equal (field, index))
9935 {
9936 if (TREE_SIDE_EFFECTS (value))
9937 break;
10f307d9 9938
df8d3e89 9939 if (TREE_CODE (value) == CONSTRUCTOR)
dff12ad7 9940 {
df8d3e89 9941 /* If VALUE is a CONSTRUCTOR, this
9942 optimization is only useful if
9943 this doesn't store the CONSTRUCTOR
9944 into memory. If it does, it is more
9945 efficient to just load the data from
9946 the array directly. */
9947 rtx ret = expand_constructor (value, target,
9948 modifier, true);
9949 if (ret == NULL_RTX)
dff12ad7 9950 break;
dff12ad7 9951 }
df8d3e89 9952
f42d23fc 9953 return
9954 expand_expr (fold (value), target, tmode, modifier);
df8d3e89 9955 }
9956 }
f42d23fc 9957 else if (TREE_CODE (init) == STRING_CST)
df8d3e89 9958 {
df8d3e89 9959 tree low_bound = array_ref_low_bound (exp);
f42d23fc 9960 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9961
9962 /* Optimize the special case of a zero lower bound.
9963
9964 We convert the lower bound to sizetype to avoid problems
9965 with constant folding. E.g. suppose the lower bound is
9966 1 and its mode is QI. Without the conversion
9967 (ARRAY + (INDEX - (unsigned char)1))
9968 becomes
9969 (ARRAY + (-(unsigned char)1) + INDEX)
9970 which becomes
9971 (ARRAY + 255 + INDEX). Oops! */
9972 if (!integer_zerop (low_bound))
df8d3e89 9973 index1 = size_diffop_loc (loc, index1,
f42d23fc 9974 fold_convert_loc (loc, sizetype,
9975 low_bound));
df8d3e89 9976
f42d23fc 9977 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
dff12ad7 9978 {
df8d3e89 9979 tree type = TREE_TYPE (TREE_TYPE (init));
3754d046 9980 machine_mode mode = TYPE_MODE (type);
3a54beaf 9981
df8d3e89 9982 if (GET_MODE_CLASS (mode) == MODE_INT
9983 && GET_MODE_SIZE (mode) == 1)
9984 return gen_int_mode (TREE_STRING_POINTER (init)
f9ae6f95 9985 [TREE_INT_CST_LOW (index1)],
df8d3e89 9986 mode);
dff12ad7 9987 }
9988 }
9989 }
9990 }
9991 goto normal_inner_ref;
9992
9993 case COMPONENT_REF:
9994 /* If the operand is a CONSTRUCTOR, we can just extract the
9995 appropriate field if it is present. */
9996 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9997 {
9998 unsigned HOST_WIDE_INT idx;
9999 tree field, value;
10000
10001 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10002 idx, field, value)
10003 if (field == treeop1
10004 /* We can normally use the value of the field in the
10005 CONSTRUCTOR. However, if this is a bitfield in
10006 an integral mode that we can fit in a HOST_WIDE_INT,
10007 we must mask only the number of bits in the bitfield,
10008 since this is done implicitly by the constructor. If
10009 the bitfield does not meet either of those conditions,
10010 we can't do this optimization. */
10011 && (! DECL_BIT_FIELD (field)
10012 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
995b44f5 10013 && (GET_MODE_PRECISION (DECL_MODE (field))
dff12ad7 10014 <= HOST_BITS_PER_WIDE_INT))))
10015 {
10016 if (DECL_BIT_FIELD (field)
10017 && modifier == EXPAND_STACK_PARM)
10018 target = 0;
10019 op0 = expand_expr (value, target, tmode, modifier);
10020 if (DECL_BIT_FIELD (field))
10021 {
f9ae6f95 10022 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3754d046 10023 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
dff12ad7 10024
10025 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10026 {
0359f9f5 10027 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10028 imode);
dff12ad7 10029 op0 = expand_and (imode, op0, op1, target);
10030 }
10031 else
10032 {
995b44f5 10033 int count = GET_MODE_PRECISION (imode) - bitsize;
dff12ad7 10034
10035 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10036 target, 0);
10037 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10038 target, 0);
10039 }
10040 }
10041
10042 return op0;
10043 }
10044 }
10045 goto normal_inner_ref;
10046
10047 case BIT_FIELD_REF:
10048 case ARRAY_RANGE_REF:
10049 normal_inner_ref:
10050 {
3754d046 10051 machine_mode mode1, mode2;
dff12ad7 10052 HOST_WIDE_INT bitsize, bitpos;
10053 tree offset;
10054 int volatilep = 0, must_force_mem;
10055 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10056 &mode1, &unsignedp, &volatilep, true);
10057 rtx orig_op0, memloc;
db219949 10058 bool clear_mem_expr = false;
dff12ad7 10059
10060 /* If we got back the original object, something is wrong. Perhaps
10061 we are evaluating an expression too early. In any event, don't
10062 infinitely recurse. */
10063 gcc_assert (tem != exp);
10064
10065 /* If TEM's type is a union of variable size, pass TARGET to the inner
10066 computation, since it will need a temporary and TARGET is known
 10067	 to be suitable.  This occurs in unchecked conversion in Ada.  */
10068 orig_op0 = op0
a12f023f 10069 = expand_expr_real (tem,
10070 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10071 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10072 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10073 != INTEGER_CST)
10074 && modifier != EXPAND_STACK_PARM
10075 ? target : NULL_RTX),
10076 VOIDmode,
10077 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10078 NULL, true);
a420d927 10079
61a1f9de 10080 /* If the field has a mode, we want to access it in the
1795103a 10081 field's mode, not the computed mode.
10082 If a MEM has VOIDmode (external with incomplete type),
10083 use BLKmode for it instead. */
10084 if (MEM_P (op0))
10085 {
61a1f9de 10086 if (mode1 != VOIDmode)
1795103a 10087 op0 = adjust_address (op0, mode1, 0);
10088 else if (GET_MODE (op0) == VOIDmode)
10089 op0 = adjust_address (op0, BLKmode, 0);
10090 }
a420d927 10091
dff12ad7 10092 mode2
10093 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10094
10095 /* If we have either an offset, a BLKmode result, or a reference
10096 outside the underlying object, we must force it to memory.
10097 Such a case can occur in Ada if we have unchecked conversion
10098 of an expression from a scalar type to an aggregate type or
10099 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10100 passed a partially uninitialized object or a view-conversion
10101 to a larger size. */
10102 must_force_mem = (offset
10103 || mode1 == BLKmode
10104 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10105
10106 /* Handle CONCAT first. */
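	/* A CONCAT holds a complex value as two separate parts, so a
	   reference to a whole part (or to the whole object) is satisfied by
	   picking the matching operand instead of going through memory.  */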
10107 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10108 {
10109 if (bitpos == 0
10110 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10111 return op0;
10112 if (bitpos == 0
10113 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10114 && bitsize)
10115 {
10116 op0 = XEXP (op0, 0);
10117 mode2 = GET_MODE (op0);
10118 }
10119 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10120 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10121 && bitpos
10122 && bitsize)
10123 {
10124 op0 = XEXP (op0, 1);
10125 bitpos = 0;
10126 mode2 = GET_MODE (op0);
10127 }
10128 else
10129 /* Otherwise force into memory. */
10130 must_force_mem = 1;
10131 }
10132
10133 /* If this is a constant, put it in a register if it is a legitimate
10134 constant and we don't need a memory reference. */
10135 if (CONSTANT_P (op0)
10136 && mode2 != BLKmode
ca316360 10137 && targetm.legitimate_constant_p (mode2, op0)
dff12ad7 10138 && !must_force_mem)
10139 op0 = force_reg (mode2, op0);
10140
10141 /* Otherwise, if this is a constant, try to force it to the constant
10142 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10143 is a legitimate constant. */
10144 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10145 op0 = validize_mem (memloc);
10146
10147 /* Otherwise, if this is a constant or the object is not in memory
10148 and need be, put it there. */
10149 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10150 {
9f495e8d 10151 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
dff12ad7 10152 emit_move_insn (memloc, op0);
10153 op0 = memloc;
db219949 10154 clear_mem_expr = true;
dff12ad7 10155 }
10156
10157 if (offset)
10158 {
3754d046 10159 machine_mode address_mode;
dff12ad7 10160 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10161 EXPAND_SUM);
10162
10163 gcc_assert (MEM_P (op0));
10164
87cf5753 10165 address_mode = get_address_mode (op0);
98155838 10166 if (GET_MODE (offset_rtx) != address_mode)
2ff88218 10167 {
10168 /* We cannot be sure that the RTL in offset_rtx is valid outside
10169 of a memory address context, so force it into a register
10170 before attempting to convert it to the desired mode. */
10171 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10172 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10173 }
dff12ad7 10174
dbe2840a 10175 /* See the comment in expand_assignment for the rationale. */
10176 if (mode1 != VOIDmode
10177 && bitpos != 0
10178 && bitsize > 0
dff12ad7 10179 && (bitpos % bitsize) == 0
10180 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
dbe2840a 10181 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
dff12ad7 10182 {
10183 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10184 bitpos = 0;
10185 }
10186
10187 op0 = offset_address (op0, offset_rtx,
10188 highest_pow2_factor (offset));
10189 }
10190
10191 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10192 record its alignment as BIGGEST_ALIGNMENT. */
10193 if (MEM_P (op0) && bitpos == 0 && offset != 0
10194 && is_aligning_offset (offset, tem))
10195 set_mem_align (op0, BIGGEST_ALIGNMENT);
10196
10197 /* Don't forget about volatility even if this is a bitfield. */
10198 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10199 {
10200 if (op0 == orig_op0)
10201 op0 = copy_rtx (op0);
10202
10203 MEM_VOLATILE_P (op0) = 1;
10204 }
10205
10206 /* In cases where an aligned union has an unaligned object
10207 as a field, we might be extracting a BLKmode value from
10208 an integer-mode (e.g., SImode) object. Handle this case
10209 by doing the extract into an object as wide as the field
10210 (which we know to be the width of a basic mode), then
10211 storing into memory, and changing the mode to BLKmode. */
10212 if (mode1 == VOIDmode
10213 || REG_P (op0) || GET_CODE (op0) == SUBREG
10214 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10215 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10216 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10217 && modifier != EXPAND_CONST_ADDRESS
7b642468 10218 && modifier != EXPAND_INITIALIZER
10219 && modifier != EXPAND_MEMORY)
61a1f9de 10220 /* If the bitfield is volatile and the bitsize
10221 is narrower than the access size of the bitfield,
10222 we need to extract bitfields from the access. */
10223 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10224 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10225 && mode1 != BLKmode
10226 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
dff12ad7 10227 /* If the field isn't aligned enough to fetch as a memref,
10228 fetch it as a bit field. */
10229 || (mode1 != BLKmode
10230 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10231 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10232 || (MEM_P (op0)
10233 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10234 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
780871fb 10235 && modifier != EXPAND_MEMORY
dff12ad7 10236 && ((modifier == EXPAND_CONST_ADDRESS
10237 || modifier == EXPAND_INITIALIZER)
10238 ? STRICT_ALIGNMENT
10239 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10240 || (bitpos % BITS_PER_UNIT != 0)))
10241 /* If the type and the field are a constant size and the
10242 size of the type isn't the same size as the bitfield,
10243 we must use bitfield operations. */
10244 || (bitsize >= 0
10245 && TYPE_SIZE (TREE_TYPE (exp))
10246 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10247 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10248 bitsize)))
10249 {
3754d046 10250 machine_mode ext_mode = mode;
dff12ad7 10251
10252 if (ext_mode == BLKmode
10253 && ! (target != 0 && MEM_P (op0)
10254 && MEM_P (target)
10255 && bitpos % BITS_PER_UNIT == 0))
10256 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10257
10258 if (ext_mode == BLKmode)
10259 {
10260 if (target == 0)
0ab48139 10261 target = assign_temp (type, 1, 1);
dff12ad7 10262
4a5cda13 10263 /* ??? Unlike the similar test a few lines below, this one is
10264 very likely obsolete. */
dff12ad7 10265 if (bitsize == 0)
10266 return target;
10267
10268 /* In this case, BITPOS must start at a byte boundary and
10269 TARGET, if specified, must be a MEM. */
10270 gcc_assert (MEM_P (op0)
10271 && (!target || MEM_P (target))
10272 && !(bitpos % BITS_PER_UNIT));
10273
10274 emit_block_move (target,
10275 adjust_address (op0, VOIDmode,
10276 bitpos / BITS_PER_UNIT),
10277 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10278 / BITS_PER_UNIT),
10279 (modifier == EXPAND_STACK_PARM
10280 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10281
10282 return target;
10283 }
10284
4a5cda13 10285 /* If we have nothing to extract, the result will be 0 for targets
10286 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10287 return 0 for the sake of consistency, as reading a zero-sized
10288 bitfield is valid in Ada and the value is fully specified. */
10289 if (bitsize == 0)
10290 return const0_rtx;
10291
dff12ad7 10292 op0 = validize_mem (op0);
10293
10294 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10295 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10296
3f71db40 10297 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
dff12ad7 10298 (modifier == EXPAND_STACK_PARM
10299 ? NULL_RTX : target),
10300 ext_mode, ext_mode);
10301
10302 /* If the result is a record type and BITSIZE is narrower than
10303 the mode of OP0, an integral mode, and this is a big endian
10304 machine, we must put the field into the high-order bits. */
10305 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10306 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10307 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10308 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
f5ff0b21 10309 GET_MODE_BITSIZE (GET_MODE (op0))
10310 - bitsize, op0, 1);
dff12ad7 10311
10312 /* If the result type is BLKmode, store the data into a temporary
10313 of the appropriate type, but with the mode corresponding to the
217d5117 10314 mode for the data we have (op0's mode). */
dff12ad7 10315 if (mode == BLKmode)
10316 {
217d5117 10317 rtx new_rtx
10318 = assign_stack_temp_for_type (ext_mode,
10319 GET_MODE_BITSIZE (ext_mode),
10320 type);
dff12ad7 10321 emit_move_insn (new_rtx, op0);
10322 op0 = copy_rtx (new_rtx);
10323 PUT_MODE (op0, BLKmode);
dff12ad7 10324 }
5db186f1 10325
dff12ad7 10326 return op0;
10327 }
10f307d9 10328
dff12ad7 10329 /* If the result is BLKmode, use that to access the object
10330 now as well. */
10331 if (mode == BLKmode)
10332 mode1 = BLKmode;
10f307d9 10333
dff12ad7 10334 /* Get a reference to just this component. */
10335 if (modifier == EXPAND_CONST_ADDRESS
10336 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10337 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10338 else
10339 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10f307d9 10340
dff12ad7 10341 if (op0 == orig_op0)
10342 op0 = copy_rtx (op0);
10f307d9 10343
db219949 10344 set_mem_attributes (op0, exp, 0);
75d525a2 10345
dff12ad7 10346 if (REG_P (XEXP (op0, 0)))
10347 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
fa56dc1d 10348
db219949 10349	      /* If op0 is a temporary because the original expression was forced
10350 to memory, clear MEM_EXPR so that the original expression cannot
10351 be marked as addressable through MEM_EXPR of the temporary. */
10352 if (clear_mem_expr)
10353 set_mem_expr (op0, NULL_TREE);
10354
dff12ad7 10355 MEM_VOLATILE_P (op0) |= volatilep;
10356 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10357 || modifier == EXPAND_CONST_ADDRESS
10358 || modifier == EXPAND_INITIALIZER)
10359 return op0;
f42d23fc 10360
e9b15297 10361 if (target == 0)
dff12ad7 10362 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
446a42ee 10363
dff12ad7 10364 convert_move (target, op0, unsignedp);
10365 return target;
10366 }
a54ebf2e 10367
dff12ad7 10368 case OBJ_TYPE_REF:
10369 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
93d9d66f 10370
dff12ad7 10371 case CALL_EXPR:
10372 /* All valid uses of __builtin_va_arg_pack () are removed during
10373 inlining. */
10374 if (CALL_EXPR_VA_ARG_PACK (exp))
10375 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
b533d30b 10376 {
dff12ad7 10377 tree fndecl = get_callee_fndecl (exp), attr;
ca436b82 10378
dff12ad7 10379 if (fndecl
10380 && (attr = lookup_attribute ("error",
10381 DECL_ATTRIBUTES (fndecl))) != NULL)
10382 error ("%Kcall to %qs declared with attribute error: %s",
10383 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10384 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10385 if (fndecl
10386 && (attr = lookup_attribute ("warning",
10387 DECL_ATTRIBUTES (fndecl))) != NULL)
10388 warning_at (tree_nonartificial_location (exp),
10389 0, "%Kcall to %qs declared with attribute warning: %s",
10390 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10391 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
ca436b82 10392
dff12ad7 10393 /* Check for a built-in function. */
10394 if (fndecl && DECL_BUILT_IN (fndecl))
b533d30b 10395 {
dff12ad7 10396 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
f21337ef 10397 if (CALL_WITH_BOUNDS_P (exp))
10398 return expand_builtin_with_bounds (exp, target, subtarget,
10399 tmode, ignore);
10400 else
10401 return expand_builtin (exp, target, subtarget, tmode, ignore);
b533d30b 10402 }
dff12ad7 10403 }
10404 return expand_call (exp, target, ignore);
ca436b82 10405
dff12ad7 10406 case VIEW_CONVERT_EXPR:
10407 op0 = NULL_RTX;
ca436b82 10408
dff12ad7 10409 /* If we are converting to BLKmode, try to avoid an intermediate
10410 temporary by fetching an inner memory reference. */
10411 if (mode == BLKmode
e9b15297 10412 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
dff12ad7 10413 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10414 && handled_component_p (treeop0))
10415 {
3754d046 10416 machine_mode mode1;
dff12ad7 10417 HOST_WIDE_INT bitsize, bitpos;
10418 tree offset;
10419 int unsignedp;
10420 int volatilep = 0;
10421 tree tem
10422 = get_inner_reference (treeop0, &bitsize, &bitpos,
10423 &offset, &mode1, &unsignedp, &volatilep,
10424 true);
10425 rtx orig_op0;
ca436b82 10426
dff12ad7 10427 /* ??? We should work harder and deal with non-zero offsets. */
10428 if (!offset
10429 && (bitpos % BITS_PER_UNIT) == 0
10430 && bitsize >= 0
e9b15297 10431 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
dff12ad7 10432 {
10433 /* See the normal_inner_ref case for the rationale. */
10434 orig_op0
a12f023f 10435 = expand_expr_real (tem,
10436 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10437 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10438 != INTEGER_CST)
10439 && modifier != EXPAND_STACK_PARM
10440 ? target : NULL_RTX),
10441 VOIDmode,
10442 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10443 NULL, true);
ca436b82 10444
dff12ad7 10445 if (MEM_P (orig_op0))
b533d30b 10446 {
dff12ad7 10447 op0 = orig_op0;
b533d30b 10448
dff12ad7 10449 /* Get a reference to just this component. */
10450 if (modifier == EXPAND_CONST_ADDRESS
10451 || modifier == EXPAND_SUM
10452 || modifier == EXPAND_INITIALIZER)
10453 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10454 else
10455 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
d2ae1b1e 10456
dff12ad7 10457 if (op0 == orig_op0)
10458 op0 = copy_rtx (op0);
10459
10460 set_mem_attributes (op0, treeop0, 0);
10461 if (REG_P (XEXP (op0, 0)))
10462 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10463
10464 MEM_VOLATILE_P (op0) |= volatilep;
10465 }
10466 }
b533d30b 10467 }
10f307d9 10468
dff12ad7 10469 if (!op0)
a12f023f 10470 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10471 NULL, inner_reference_p);
dff12ad7 10472
10473 /* If the input and output modes are both the same, we are done. */
10474 if (mode == GET_MODE (op0))
10475 ;
10476 /* If neither mode is BLKmode, and both modes are the same size
10477 then we can use gen_lowpart. */
10478 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
995b44f5 10479 && (GET_MODE_PRECISION (mode)
10480 == GET_MODE_PRECISION (GET_MODE (op0)))
dff12ad7 10481 && !COMPLEX_MODE_P (GET_MODE (op0)))
10482 {
10483 if (GET_CODE (op0) == SUBREG)
10484 op0 = force_reg (GET_MODE (op0), op0);
062fb763 10485 temp = gen_lowpart_common (mode, op0);
10486 if (temp)
10487 op0 = temp;
10488 else
10489 {
10490 if (!REG_P (op0) && !MEM_P (op0))
10491 op0 = force_reg (GET_MODE (op0), op0);
10492 op0 = gen_lowpart (mode, op0);
10493 }
dff12ad7 10494 }
f235634e 10495 /* If both types are integral, convert from one mode to the other. */
10496 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
48e1416a 10497 op0 = convert_modes (mode, GET_MODE (op0), op0,
dff12ad7 10498 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
fc0bb78b 10499 /* If the output type is a bit-field type, do an extraction. */
10500 else if (reduce_bit_field)
10501 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10502 TYPE_UNSIGNED (type), NULL_RTX,
10503 mode, mode);
dff12ad7 10504 /* As a last resort, spill op0 to memory, and reload it in a
10505 different mode. */
10506 else if (!MEM_P (op0))
10507 {
10508 /* If the operand is not a MEM, force it into memory. Since we
10509 are going to be changing the mode of the MEM, don't call
10510 force_const_mem for constants because we don't allow pool
10511 constants to change mode. */
10512 tree inner_type = TREE_TYPE (treeop0);
10513
10514 gcc_assert (!TREE_ADDRESSABLE (exp));
10f307d9 10515
dff12ad7 10516 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10517 target
10518 = assign_stack_temp_for_type
10519 (TYPE_MODE (inner_type),
0ab48139 10520 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
d2ae1b1e 10521
dff12ad7 10522 emit_move_insn (target, op0);
10523 op0 = target;
10524 }
d2ae1b1e 10525
fc0bb78b 10526 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10527 output type is such that the operand is known to be aligned, indicate
10528 that it is. Otherwise, we need only be concerned about alignment for
10529 non-BLKmode results. */
dff12ad7 10530 if (MEM_P (op0))
10531 {
7f295214 10532 enum insn_code icode;
10533
dff12ad7 10534 if (TYPE_ALIGN_OK (type))
d3909c67 10535 {
10536 /* ??? Copying the MEM without substantially changing it might
10537 run afoul of the code handling volatile memory references in
10538 store_expr, which assumes that TARGET is returned unmodified
10539 if it has been used. */
10540 op0 = copy_rtx (op0);
10541 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10542 }
a12f023f 10543 else if (modifier != EXPAND_WRITE
10544 && modifier != EXPAND_MEMORY
10545 && !inner_reference_p
dff12ad7 10546 && mode != BLKmode
10547 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10548 {
a12f023f 10549 /* If the target does have special handling for unaligned
 10550	       loads of this mode, use them.  */
10551 if ((icode = optab_handler (movmisalign_optab, mode))
10552 != CODE_FOR_nothing)
10553 {
9ed997be 10554 rtx reg;
a12f023f 10555
10556 op0 = adjust_address (op0, mode, 0);
10557 /* We've already validated the memory, and we're creating a
10558 new pseudo destination. The predicates really can't
10559 fail. */
10560 reg = gen_reg_rtx (mode);
10561
10562 /* Nor can the insn generator. */
9ed997be 10563 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
a12f023f 10564 emit_insn (insn);
10565 return reg;
10566 }
10567 else if (STRICT_ALIGNMENT)
10568 {
10569 tree inner_type = TREE_TYPE (treeop0);
10570 HOST_WIDE_INT temp_size
10571 = MAX (int_size_in_bytes (inner_type),
10572 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10573 rtx new_rtx
10574 = assign_stack_temp_for_type (mode, temp_size, type);
10575 rtx new_with_op0_mode
10576 = adjust_address (new_rtx, GET_MODE (op0), 0);
10577
10578 gcc_assert (!TREE_ADDRESSABLE (exp));
10579
10580 if (GET_MODE (op0) == BLKmode)
10581 emit_block_move (new_with_op0_mode, op0,
10582 GEN_INT (GET_MODE_SIZE (mode)),
10583 (modifier == EXPAND_STACK_PARM
10584 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10585 else
10586 emit_move_insn (new_with_op0_mode, op0);
6295ca72 10587
a12f023f 10588 op0 = new_rtx;
10589 }
dff12ad7 10590 }
68a556d6 10591
dff12ad7 10592 op0 = adjust_address (op0, mode, 0);
10593 }
10f307d9 10594
dff12ad7 10595 return op0;
4ee9c684 10596
41076ef6 10597 case MODIFY_EXPR:
10598 {
588e1cc3 10599 tree lhs = treeop0;
10600 tree rhs = treeop1;
987329ad 10601 gcc_assert (ignore);
10602
10f307d9 10603 /* Check for |= or &= of a bitfield of size one into another bitfield
10604 of size 1. In this case, (unless we need the result of the
10605 assignment) we can do this more efficiently with a
10606 test followed by an assignment, if necessary.
10607
10608 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10609 things change so we do, this code should be enhanced to
10610 support it. */
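	/* For instance, with one-bit fields a and b, "x.a |= x.b" can be
	   expanded as "if (x.b) x.a = 1;" and "x.a &= x.b" as
	   "if (!x.b) x.a = 0;", avoiding a read-modify-write of x.a.  */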
987329ad 10611 if (TREE_CODE (lhs) == COMPONENT_REF
10f307d9 10612 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10613 || TREE_CODE (rhs) == BIT_AND_EXPR)
10614 && TREE_OPERAND (rhs, 0) == lhs
10615 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
a0c2c45b 10616 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10617 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10f307d9 10618 {
1d277a67 10619 rtx_code_label *label = gen_label_rtx ();
e3b560a6 10620 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10f307d9 10621 do_jump (TREE_OPERAND (rhs, 1),
e3b560a6 10622 value ? label : 0,
79ab74cc 10623 value ? 0 : label, -1);
5b5037b3 10624 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
ca06a84c 10625 false);
01ab6370 10626 do_pending_stack_adjust ();
10f307d9 10627 emit_label (label);
10628 return const0_rtx;
10629 }
10630
ca06a84c 10631 expand_assignment (lhs, rhs, false);
993e4bab 10632 return const0_rtx;
10f307d9 10633 }
10634
10f307d9 10635 case ADDR_EXPR:
b51e4016 10636 return expand_expr_addr_expr (exp, target, tmode, modifier);
10f307d9 10637
b63679d2 10638 case REALPART_EXPR:
588e1cc3 10639 op0 = expand_normal (treeop0);
de17a47b 10640 return read_complex_part (op0, false);
fa56dc1d 10641
b63679d2 10642 case IMAGPART_EXPR:
588e1cc3 10643 op0 = expand_normal (treeop0);
de17a47b 10644 return read_complex_part (op0, true);
b63679d2 10645
16c9337c 10646 case RETURN_EXPR:
10647 case LABEL_EXPR:
10648 case GOTO_EXPR:
10649 case SWITCH_EXPR:
10650 case ASM_EXPR:
16c9337c 10651 /* Expanded in cfgexpand.c. */
10652 gcc_unreachable ();
4ee9c684 10653
694ec519 10654 case TRY_CATCH_EXPR:
4ee9c684 10655 case CATCH_EXPR:
4ee9c684 10656 case EH_FILTER_EXPR:
f0c211a3 10657 case TRY_FINALLY_EXPR:
6388f9f7 10658 /* Lowered by tree-eh.c. */
611234b4 10659 gcc_unreachable ();
f0c211a3 10660
6388f9f7 10661 case WITH_CLEANUP_EXPR:
10662 case CLEANUP_POINT_EXPR:
10663 case TARGET_EXPR:
873f1e89 10664 case CASE_LABEL_EXPR:
2799a2b7 10665 case VA_ARG_EXPR:
491e04ef 10666 case BIND_EXPR:
e3ee6a3e 10667 case INIT_EXPR:
10668 case CONJ_EXPR:
10669 case COMPOUND_EXPR:
10670 case PREINCREMENT_EXPR:
10671 case PREDECREMENT_EXPR:
10672 case POSTINCREMENT_EXPR:
10673 case POSTDECREMENT_EXPR:
10674 case LOOP_EXPR:
10675 case EXIT_EXPR:
7843e4bc 10676 case COMPOUND_LITERAL_EXPR:
6388f9f7 10677 /* Lowered by gimplify.c. */
611234b4 10678 gcc_unreachable ();
f0c211a3 10679
6bfa2cc1 10680 case FDESC_EXPR:
10681 /* Function descriptors are not valid except for as
10682 initialization constants, and should not be expanded. */
611234b4 10683 gcc_unreachable ();
6bfa2cc1 10684
80f06481 10685 case WITH_SIZE_EXPR:
10686 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10687 have pulled out the size to use in whatever context it needed. */
588e1cc3 10688 return expand_expr_real (treeop0, original_target, tmode,
a12f023f 10689 modifier, alt_rtl, inner_reference_p);
80f06481 10690
10f307d9 10691 default:
dff12ad7 10692 return expand_expr_real_2 (&ops, target, tmode, modifier);
10f307d9 10693 }
4f7f7efd 10694}
4f7f7efd 10695\f
10696/* Subroutine of above: reduce EXP to the precision of TYPE (in the
10697 signedness of TYPE), possibly returning the result in TARGET. */
10698static rtx
10699reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10700{
10701 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10702 if (target && GET_MODE (target) != GET_MODE (exp))
10703 target = 0;
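  /* Three cases follow: constants are rebuilt at the desired precision,
     unsigned values are masked down to PREC bits, and signed values are
     sign-extended from bit PREC-1 with a left/right shift pair.  */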
32e79ae6 10704 /* For constant values, reduce using build_int_cst_type. */
971ba038 10705 if (CONST_INT_P (exp))
32e79ae6 10706 {
10707 HOST_WIDE_INT value = INTVAL (exp);
10708 tree t = build_int_cst_type (type, value);
10709 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10710 }
10711 else if (TYPE_UNSIGNED (type))
4f7f7efd 10712 {
3754d046 10713 machine_mode mode = GET_MODE (exp);
ddb1be65 10714 rtx mask = immed_wide_int_const
796b6678 10715 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
e913b5cd 10716 return expand_and (mode, exp, mask, target);
4f7f7efd 10717 }
10718 else
10719 {
995b44f5 10720 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
f5ff0b21 10721 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10722 exp, count, target, 0);
10723 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10724 exp, count, target, 0);
4f7f7efd 10725 }
10f307d9 10726}
b54842d8 10727\f
67c68e45 10728/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10729 when applied to the address of EXP produces an address known to be
10730 aligned more than BIGGEST_ALIGNMENT. */
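/* Such an offset typically has the form (-(sizetype) &EXP) & (ALIGN - 1)
   with ALIGN a power of two larger than BIGGEST_ALIGNMENT; adding it to
   the address of EXP rounds that address up to an ALIGN boundary.  */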
10731
10732static int
1f1872fd 10733is_aligning_offset (const_tree offset, const_tree exp)
67c68e45 10734{
55f9d7dc 10735 /* Strip off any conversions. */
72dd6141 10736 while (CONVERT_EXPR_P (offset))
67c68e45 10737 offset = TREE_OPERAND (offset, 0);
10738
10739 /* We must now have a BIT_AND_EXPR with a constant that is one less than
 10740	     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10741 if (TREE_CODE (offset) != BIT_AND_EXPR
e913b5cd 10742 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
491e04ef 10743 || compare_tree_int (TREE_OPERAND (offset, 1),
6be0ba7c 10744 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
ac8c312d 10745 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
67c68e45 10746 return 0;
10747
10748 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10749 It must be NEGATE_EXPR. Then strip any more conversions. */
10750 offset = TREE_OPERAND (offset, 0);
72dd6141 10751 while (CONVERT_EXPR_P (offset))
67c68e45 10752 offset = TREE_OPERAND (offset, 0);
10753
10754 if (TREE_CODE (offset) != NEGATE_EXPR)
10755 return 0;
10756
10757 offset = TREE_OPERAND (offset, 0);
72dd6141 10758 while (CONVERT_EXPR_P (offset))
67c68e45 10759 offset = TREE_OPERAND (offset, 0);
10760
55f9d7dc 10761 /* This must now be the address of EXP. */
10762 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
67c68e45 10763}
10764\f
dafdd1c8 10765	/* Return the tree node if ARG corresponds to a string constant, or zero
6ef828f9 10766 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
902de8ed 10767 in bytes within the string that ARG is accessing. The type of the
10768 offset will be `sizetype'. */
b54842d8 10769
53800dbe 10770tree
35cb5232 10771string_constant (tree arg, tree *ptr_offset)
b54842d8 10772{
de556b32 10773 tree array, offset, lower_bound;
b54842d8 10774 STRIP_NOPS (arg);
10775
d2165e90 10776 if (TREE_CODE (arg) == ADDR_EXPR)
b54842d8 10777 {
d2165e90 10778 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10779 {
10780 *ptr_offset = size_zero_node;
10781 return TREE_OPERAND (arg, 0);
10782 }
10783 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10784 {
10785 array = TREE_OPERAND (arg, 0);
10786 offset = size_zero_node;
10787 }
10788 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10789 {
10790 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10791 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10792 if (TREE_CODE (array) != STRING_CST
10793 && TREE_CODE (array) != VAR_DECL)
10794 return 0;
de556b32 10795
f2b32076 10796 /* Check if the array has a nonzero lower bound. */
de556b32 10797 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10798 if (!integer_zerop (lower_bound))
10799 {
10800 /* If the offset and base aren't both constants, return 0. */
10801 if (TREE_CODE (lower_bound) != INTEGER_CST)
10802 return 0;
10803 if (TREE_CODE (offset) != INTEGER_CST)
10804 return 0;
10805 /* Adjust offset by the lower bound. */
1f8b6002 10806 offset = size_diffop (fold_convert (sizetype, offset),
de556b32 10807 fold_convert (sizetype, lower_bound));
10808 }
d2165e90 10809 }
f32d300e 10810 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10811 {
10812 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10813 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10814 if (TREE_CODE (array) != ADDR_EXPR)
10815 return 0;
10816 array = TREE_OPERAND (array, 0);
10817 if (TREE_CODE (array) != STRING_CST
10818 && TREE_CODE (array) != VAR_DECL)
10819 return 0;
10820 }
d2165e90 10821 else
10822 return 0;
4ee9c684 10823 }
0de36bdb 10824 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
b54842d8 10825 {
10826 tree arg0 = TREE_OPERAND (arg, 0);
10827 tree arg1 = TREE_OPERAND (arg, 1);
10828
10829 STRIP_NOPS (arg0);
10830 STRIP_NOPS (arg1);
10831
10832 if (TREE_CODE (arg0) == ADDR_EXPR
d2165e90 10833 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10834 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10f307d9 10835 {
d2165e90 10836 array = TREE_OPERAND (arg0, 0);
10837 offset = arg1;
10f307d9 10838 }
b54842d8 10839 else if (TREE_CODE (arg1) == ADDR_EXPR
d2165e90 10840 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10841 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10f307d9 10842 {
d2165e90 10843 array = TREE_OPERAND (arg1, 0);
10844 offset = arg0;
10f307d9 10845 }
d2165e90 10846 else
10847 return 0;
10848 }
10849 else
10850 return 0;
10851
10852 if (TREE_CODE (array) == STRING_CST)
10853 {
e3b560a6 10854 *ptr_offset = fold_convert (sizetype, offset);
d2165e90 10855 return array;
10856 }
93b79643 10857 else if (TREE_CODE (array) == VAR_DECL
10858 || TREE_CODE (array) == CONST_DECL)
d2165e90 10859 {
10860 int length;
df8d3e89 10861 tree init = ctor_for_folding (array);
d2165e90 10862
10863 /* Variables initialized to string literals can be handled too. */
df8d3e89 10864 if (init == error_mark_node
10865 || !init
10866 || TREE_CODE (init) != STRING_CST)
d2165e90 10867 return 0;
10868
d2165e90 10869 /* Avoid const char foo[4] = "abcde"; */
10870 if (DECL_SIZE_UNIT (array) == NULL_TREE
10871 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
df8d3e89 10872 || (length = TREE_STRING_LENGTH (init)) <= 0
d2165e90 10873 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10874 return 0;
10875
 10876	  /* If the variable is bigger than the string literal, OFFSET must be constant
 10877	     and within the bounds of the string literal.  */
e3b560a6 10878 offset = fold_convert (sizetype, offset);
d2165e90 10879 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
e913b5cd 10880 && (! tree_fits_uhwi_p (offset)
d2165e90 10881 || compare_tree_int (offset, length) >= 0))
10882 return 0;
10883
10884 *ptr_offset = offset;
df8d3e89 10885 return init;
b54842d8 10886 }
649d8da6 10887
b54842d8 10888 return 0;
10889}
649d8da6 10890\f
2c02962c 10891	/* Generate code to calculate OPS, an exploded expression,
 10892	   using a store-flag instruction, and return an rtx for the result.
10893 OPS reflects a comparison.
649d8da6 10894
b54842d8 10895 If TARGET is nonzero, store the result there if convenient.
649d8da6 10896
b54842d8 10897 Return zero if there is no suitable set-flag instruction
10898 available on this machine.
649d8da6 10899
b54842d8 10900 Once expand_expr has been called on the arguments of the comparison,
10901 we are committed to doing the store flag, since it is not safe to
10902 re-evaluate the expression. We emit the store-flag insn by calling
10903 emit_store_flag, but only expand the arguments if we have a reason
10904 to believe that emit_store_flag will be successful. If we think that
10905 it will, but it isn't, we have to simulate the store-flag with a
10906 set/jump/set sequence. */
649d8da6 10907
b54842d8 10908static rtx
3754d046 10909do_store_flag (sepops ops, rtx target, machine_mode mode)
b54842d8 10910{
10911 enum rtx_code code;
10912 tree arg0, arg1, type;
10913 tree tem;
3754d046 10914 machine_mode operand_mode;
b54842d8 10915 int unsignedp;
10916 rtx op0, op1;
b54842d8 10917 rtx subtarget = target;
2c02962c 10918 location_t loc = ops->location;
649d8da6 10919
2c02962c 10920 arg0 = ops->op0;
10921 arg1 = ops->op1;
fc80e4dd 10922
10923 /* Don't crash if the comparison was erroneous. */
10924 if (arg0 == error_mark_node || arg1 == error_mark_node)
10925 return const0_rtx;
10926
b54842d8 10927 type = TREE_TYPE (arg0);
10928 operand_mode = TYPE_MODE (type);
78a8ed03 10929 unsignedp = TYPE_UNSIGNED (type);
649d8da6 10930
b54842d8 10931 /* We won't bother with BLKmode store-flag operations because it would mean
10932 passing a lot of information to emit_store_flag. */
10933 if (operand_mode == BLKmode)
10934 return 0;
649d8da6 10935
b54842d8 10936 /* We won't bother with store-flag operations involving function pointers
10937 when function pointers must be canonicalized before comparisons. */
10938#ifdef HAVE_canonicalize_funcptr_for_compare
10939 if (HAVE_canonicalize_funcptr_for_compare
2c02962c 10940 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10941 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
b54842d8 10942 == FUNCTION_TYPE))
2c02962c 10943 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10944 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
b54842d8 10945 == FUNCTION_TYPE))))
10946 return 0;
649d8da6 10947#endif
10948
b54842d8 10949 STRIP_NOPS (arg0);
10950 STRIP_NOPS (arg1);
6cf89e04 10951
d7ad16c2 10952 /* For vector typed comparisons emit code to generate the desired
10953 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10954 expander for this. */
10955 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10956 {
10957 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10958 tree if_true = constant_boolean_node (true, ops->type);
10959 tree if_false = constant_boolean_node (false, ops->type);
10960 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10961 }
10962
b54842d8 10963 /* Get the rtx comparison code to use. We know that EXP is a comparison
10964 operation of some type. Some comparisons against 1 and -1 can be
10965 converted to comparisons with zero. Do so here so that the tests
10966 below will be aware that we have a comparison with zero. These
10967 tests will not catch constants in the first operand, but constants
10968 are rarely passed as the first operand. */
649d8da6 10969
2c02962c 10970 switch (ops->code)
b54842d8 10971 {
10972 case EQ_EXPR:
10973 code = EQ;
10f307d9 10974 break;
b54842d8 10975 case NE_EXPR:
10976 code = NE;
10f307d9 10977 break;
b54842d8 10978 case LT_EXPR:
10979 if (integer_onep (arg1))
10980 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10981 else
10982 code = unsignedp ? LTU : LT;
649d8da6 10983 break;
b54842d8 10984 case LE_EXPR:
10985 if (! unsignedp && integer_all_onesp (arg1))
10986 arg1 = integer_zero_node, code = LT;
10987 else
10988 code = unsignedp ? LEU : LE;
649d8da6 10989 break;
b54842d8 10990 case GT_EXPR:
10991 if (! unsignedp && integer_all_onesp (arg1))
10992 arg1 = integer_zero_node, code = GE;
10993 else
10994 code = unsignedp ? GTU : GT;
10995 break;
10996 case GE_EXPR:
10997 if (integer_onep (arg1))
10998 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10999 else
11000 code = unsignedp ? GEU : GE;
649d8da6 11001 break;
a4110d9a 11002
11003 case UNORDERED_EXPR:
11004 code = UNORDERED;
11005 break;
11006 case ORDERED_EXPR:
11007 code = ORDERED;
11008 break;
11009 case UNLT_EXPR:
11010 code = UNLT;
11011 break;
11012 case UNLE_EXPR:
11013 code = UNLE;
11014 break;
11015 case UNGT_EXPR:
11016 code = UNGT;
11017 break;
11018 case UNGE_EXPR:
11019 code = UNGE;
11020 break;
11021 case UNEQ_EXPR:
11022 code = UNEQ;
11023 break;
318a728f 11024 case LTGT_EXPR:
11025 code = LTGT;
11026 break;
a4110d9a 11027
649d8da6 11028 default:
611234b4 11029 gcc_unreachable ();
10f307d9 11030 }
10f307d9 11031
b54842d8 11032 /* Put a constant second. */
68a556d6 11033 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11034 || TREE_CODE (arg0) == FIXED_CST)
b54842d8 11035 {
11036 tem = arg0; arg0 = arg1; arg1 = tem;
11037 code = swap_condition (code);
649d8da6 11038 }
10f307d9 11039
b54842d8 11040 /* If this is an equality or inequality test of a single bit, we can
11041 do this by shifting the bit being tested to the low-order bit and
11042 masking the result with the constant 1. If the condition was EQ,
11043 we xor it with 1. This does not require an scc insn and is faster
6881f973 11044 than an scc insn even if we have it.
11045
11046 The code to make this transformation was moved into fold_single_bit_test,
11047 so we just call into the folder and expand its result. */
3218a49d 11048
b54842d8 11049 if ((code == NE || code == EQ)
a53286c0 11050 && integer_zerop (arg1)
d430af93 11051 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
76c3b8b7 11052 {
a53286c0 11053 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11054 if (srcstmt
11055 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11056 {
11057 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11058 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11059 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11060 gimple_assign_rhs1 (srcstmt),
11061 gimple_assign_rhs2 (srcstmt));
11062 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11063 if (temp)
11064 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11065 }
76c3b8b7 11066 }
10f307d9 11067
d8e5b213 11068 if (! get_subtarget (target)
a54ebf2e 11069 || GET_MODE (subtarget) != operand_mode)
b54842d8 11070 subtarget = 0;
11071
b9c74b4d 11072 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
b54842d8 11073
11074 if (target == 0)
11075 target = gen_reg_rtx (mode);
11076
80e1bfa1 11077 /* Try a cstore if possible. */
11078 return emit_store_flag_force (target, code, op0, op1,
d430af93 11079 operand_mode, unsignedp,
11080 (TYPE_PRECISION (ops->type) == 1
11081 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
649d8da6 11082}
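
/* A minimal plain-C sketch (illustration only, not part of the expander
   above) of the single-bit test rewrite that do_store_flag delegates to
   fold_single_bit_test: when the mask has only bit N set,
   (X & (1 << N)) != 0 becomes (X >> N) & 1, and the EQ form xors the
   result with 1, so no scc/cstore instruction is needed.  */

static unsigned int
single_bit_test_sketch (unsigned int x, unsigned int n, int test_eq)
{
  unsigned int bit = (x >> n) & 1;	/* NE form: 1 iff bit N of X is set.  */
  return test_eq ? bit ^ 1 : bit;	/* EQ form: invert that result.  */
}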
b54842d8 11083\f
b54842d8 11084
539a3a92 11085/* Stubs in case we haven't got a casesi insn. */
11086#ifndef HAVE_casesi
11087# define HAVE_casesi 0
11088# define gen_casesi(a, b, c, d, e) (0)
11089# define CODE_FOR_casesi CODE_FOR_nothing
11090#endif
11091
539a3a92 11092/* Attempt to generate a casesi instruction. Returns 1 if successful,
584abc98 11093 0 otherwise (i.e. if there is no casesi instruction).
11094
11095 DEFAULT_PROBABILITY is the probability of jumping to the default
11096 label. */
539a3a92 11097int
35cb5232 11098try_casesi (tree index_type, tree index_expr, tree minval, tree range,
584abc98 11099 rtx table_label, rtx default_label, rtx fallback_label,
11100 int default_probability)
539a3a92 11101{
8786db1e 11102 struct expand_operand ops[5];
3754d046 11103 machine_mode index_mode = SImode;
539a3a92 11104 rtx op1, op2, index;
539a3a92 11105
11106 if (! HAVE_casesi)
11107 return 0;
11108
11109 /* Convert the index to SImode. */
11110 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11111 {
3754d046 11112 machine_mode omode = TYPE_MODE (index_type);
8ec3c5c2 11113 rtx rangertx = expand_normal (range);
539a3a92 11114
11115 /* We must handle the endpoints in the original mode. */
b55f9493 11116 index_expr = build2 (MINUS_EXPR, index_type,
11117 index_expr, minval);
539a3a92 11118 minval = integer_zero_node;
8ec3c5c2 11119 index = expand_normal (index_expr);
72c30859 11120 if (default_label)
11121 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
584abc98 11122 omode, 1, default_label,
11123 default_probability);
539a3a92 11124 /* Now we can safely truncate. */
11125 index = convert_to_mode (index_mode, index, 0);
11126 }
11127 else
11128 {
11129 if (TYPE_MODE (index_type) != index_mode)
11130 {
1b3c3119 11131 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
e3b560a6 11132 index_expr = fold_convert (index_type, index_expr);
539a3a92 11133 }
11134
8ec3c5c2 11135 index = expand_normal (index_expr);
539a3a92 11136 }
0a534ba7 11137
539a3a92 11138 do_pending_stack_adjust ();
11139
8ec3c5c2 11140 op1 = expand_normal (minval);
8ec3c5c2 11141 op2 = expand_normal (range);
539a3a92 11142
8786db1e 11143 create_input_operand (&ops[0], index, index_mode);
11144 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11145 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11146 create_fixed_operand (&ops[3], table_label);
11147 create_fixed_operand (&ops[4], (default_label
11148 ? default_label
11149 : fallback_label));
11150 expand_jump_insn (CODE_FOR_casesi, 5, ops);
539a3a92 11151 return 1;
11152}
11153
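/* A minimal plain-C sketch (illustration only) of the wide-index handling in
   try_casesi, with hypothetical 64-bit/32-bit scalars standing in for the
   original mode and SImode, and assuming RANGE fits in 32 bits as it does for
   a real jump table.  The bias and the bounds check are done in the wide
   type; only an in-range value is truncated to the 32-bit index that the
   casesi pattern expects.  */

static int
casesi_index_sketch (long long index, long long minval,
		     unsigned long long range, int *index32)
{
  unsigned long long biased
    = (unsigned long long) index - (unsigned long long) minval;
  if (biased > range)
    return 0;			/* Caller branches to the default label.  */
  *index32 = (int) biased;	/* Now safe to truncate to SImode width.  */
  return 1;
}
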
11154/* Attempt to generate a tablejump instruction; same concept as try_casesi above.  */
539a3a92 11155/* Subroutine of the next function.
11156
11157 INDEX is the value being switched on, with the lowest value
b54842d8 11158 in the table already subtracted.
11159 MODE is its expected mode (needed if INDEX is constant).
11160 RANGE is the length of the jump table.
11161 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
1ccc1a7e 11162
b54842d8 11163 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
584abc98 11164 index value is out of range.
11165 DEFAULT_PROBABILITY is the probability of jumping to
11166 the default label. */
a92771b8 11167
539a3a92 11168static void
3754d046 11169do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
584abc98 11170 rtx default_label, int default_probability)
649d8da6 11171{
19cb6b50 11172 rtx temp, vector;
1ccc1a7e 11173
edb7afe8 11174 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11175 cfun->cfg->max_jumptable_ents = INTVAL (range);
71a455ac 11176
b54842d8 11177 /* Do an unsigned comparison (in the proper mode) between the index
11178 expression and the value which represents the length of the range.
11179 Since we just finished subtracting the lower bound of the range
11180 from the index expression, this comparison allows us to simultaneously
11181 check that the original index expression value is both greater than
11182 or equal to the minimum value of the range and less than or equal to
 11183	   the maximum value of the range; a plain C model of this check follows the function.  */
9282409c 11184
72c30859 11185 if (default_label)
11186 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
584abc98 11187 default_label, default_probability);
11188
10f307d9 11189
b54842d8 11190 /* If index is in range, it must fit in Pmode.
11191 Convert to Pmode so we can index with it. */
11192 if (mode != Pmode)
11193 index = convert_to_mode (Pmode, index, 1);
10f307d9 11194
c7bf1374 11195 /* Don't let a MEM slip through, because then INDEX that comes
b54842d8 11196 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11197 and break_out_memory_refs will go to work on it and mess it up. */
11198#ifdef PIC_CASE_VECTOR_ADDRESS
8ad4c111 11199 if (flag_pic && !REG_P (index))
b54842d8 11200 index = copy_to_mode_reg (Pmode, index);
11201#endif
649d8da6 11202
b54842d8 11203 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11204 GET_MODE_SIZE, because this indicates how large insns are. The other
11205 uses should all be Pmode, because they are addresses. This code
11206 could fail if addresses and insns are not the same size. */
178ef0b6 11207 index = simplify_gen_binary (MULT, Pmode, index,
11208 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11209 Pmode));
11210 index = simplify_gen_binary (PLUS, Pmode, index,
11211 gen_rtx_LABEL_REF (Pmode, table_label));
11212
b54842d8 11213#ifdef PIC_CASE_VECTOR_ADDRESS
11214 if (flag_pic)
11215 index = PIC_CASE_VECTOR_ADDRESS (index);
11216 else
10f307d9 11217#endif
4d25f9eb 11218 index = memory_address (CASE_VECTOR_MODE, index);
b54842d8 11219 temp = gen_reg_rtx (CASE_VECTOR_MODE);
e265a6da 11220 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b54842d8 11221 convert_move (temp, vector, 0);
11222
11223 emit_jump_insn (gen_tablejump (temp, table_label));
11224
11225 /* If we are generating PIC code or if the table is PC-relative, the
11226 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11227 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11228 emit_barrier ();
10f307d9 11229}
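
/* A minimal plain-C model (illustration only) of the range check above: once
   the lower bound has been subtracted, a single unsigned comparison against
   the table length tests both ends of the range, because values below LOW
   wrap around to very large unsigned numbers.  */

static int
tablejump_in_range_sketch (int value, int low, int high)
{
  unsigned int biased = (unsigned int) value - (unsigned int) low;
  unsigned int length = (unsigned int) high - (unsigned int) low;
  /* Equivalent to LOW <= VALUE && VALUE <= HIGH for any LOW <= HIGH.  */
  return biased <= length;
}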
b54842d8 11230
539a3a92 11231int
35cb5232 11232try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
584abc98 11233 rtx table_label, rtx default_label, int default_probability)
539a3a92 11234{
11235 rtx index;
11236
11237 if (! HAVE_tablejump)
11238 return 0;
11239
faa43f85 11240 index_expr = fold_build2 (MINUS_EXPR, index_type,
e3b560a6 11241 fold_convert (index_type, index_expr),
11242 fold_convert (index_type, minval));
8ec3c5c2 11243 index = expand_normal (index_expr);
539a3a92 11244 do_pending_stack_adjust ();
11245
11246 do_tablejump (index, TYPE_MODE (index_type),
11247 convert_modes (TYPE_MODE (index_type),
11248 TYPE_MODE (TREE_TYPE (range)),
8ec3c5c2 11249 expand_normal (range),
78a8ed03 11250 TYPE_UNSIGNED (TREE_TYPE (range))),
584abc98 11251 table_label, default_label, default_probability);
539a3a92 11252 return 1;
11253}
1f3233d1 11254
c3309fc6 11255/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11256static rtx
35cb5232 11257const_vector_from_tree (tree exp)
c3309fc6 11258{
11259 rtvec v;
fadf62f4 11260 unsigned i;
11261 int units;
11262 tree elt;
3754d046 11263 machine_mode inner, mode;
c3309fc6 11264
11265 mode = TYPE_MODE (TREE_TYPE (exp));
11266
4ee9c684 11267 if (initializer_zerop (exp))
c3309fc6 11268 return CONST0_RTX (mode);
11269
11270 units = GET_MODE_NUNITS (mode);
11271 inner = GET_MODE_INNER (mode);
11272
11273 v = rtvec_alloc (units);
11274
fadf62f4 11275 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
c3309fc6 11276 {
fadf62f4 11277 elt = VECTOR_CST_ELT (exp, i);
c3309fc6 11278
11279 if (TREE_CODE (elt) == REAL_CST)
11280 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11281 inner);
68a556d6 11282 else if (TREE_CODE (elt) == FIXED_CST)
11283 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11284 inner);
c3309fc6 11285 else
c4050ce7 11286 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
c3309fc6 11287 }
11288
6e68dcb2 11289 return gen_rtx_CONST_VECTOR (mode, v);
c3309fc6 11290}
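
/* For instance, a V4SImode VECTOR_CST with elements {1,2,3,4} yields a
   (const_vector:V4SI [...]) holding those four constants, while an all-zero
   constant short-circuits to CONST0_RTX for the mode.  */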
58d82cd0 11291
382597e4 11292/* Build a decl for a personality function given a language prefix. */
58d82cd0 11293
11294tree
382597e4 11295build_personality_function (const char *lang)
58d82cd0 11296{
382597e4 11297 const char *unwind_and_version;
58d82cd0 11298 tree decl, type;
382597e4 11299 char *name;
11300
218e3e4e 11301 switch (targetm_common.except_unwind_info (&global_options))
382597e4 11302 {
11303 case UI_NONE:
11304 return NULL;
11305 case UI_SJLJ:
11306 unwind_and_version = "_sj0";
11307 break;
11308 case UI_DWARF2:
11309 case UI_TARGET:
11310 unwind_and_version = "_v0";
11311 break;
8ad0b530 11312 case UI_SEH:
11313 unwind_and_version = "_seh0";
11314 break;
382597e4 11315 default:
11316 gcc_unreachable ();
11317 }
11318
11319 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
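  /* For instance, the C++ front end's "gxx" prefix yields
     "__gxx_personality_v0" for DWARF2/target unwinding,
     "__gxx_personality_sj0" for SJLJ and "__gxx_personality_seh0" for SEH.  */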
58d82cd0 11320
11321 type = build_function_type_list (integer_type_node, integer_type_node,
11322 long_long_unsigned_type_node,
11323 ptr_type_node, ptr_type_node, NULL_TREE);
11324 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11325 get_identifier (name), type);
11326 DECL_ARTIFICIAL (decl) = 1;
11327 DECL_EXTERNAL (decl) = 1;
11328 TREE_PUBLIC (decl) = 1;
11329
11330 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11331 are the flags assigned by targetm.encode_section_info. */
11332 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11333
11334 return decl;
11335}
11336
11337/* Extracts the personality function of DECL and returns the corresponding
11338 libfunc. */
11339
11340rtx
11341get_personality_function (tree decl)
11342{
11343 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11344 enum eh_personality_kind pk;
11345
11346 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11347 if (pk == eh_personality_none)
11348 return NULL;
11349
11350 if (!personality
11351 && pk == eh_personality_any)
11352 personality = lang_hooks.eh_personality ();
11353
11354 if (pk == eh_personality_lang)
11355 gcc_assert (personality != NULL_TREE);
11356
11357 return XEXP (DECL_RTL (personality), 0);
11358}
11359
d53441c8 11360/* Returns a tree for the size of EXP in bytes. */
11361
11362static tree
11363tree_expr_size (const_tree exp)
11364{
11365 if (DECL_P (exp)
11366 && DECL_SIZE_UNIT (exp) != 0)
11367 return DECL_SIZE_UNIT (exp);
11368 else
11369 return size_in_bytes (TREE_TYPE (exp));
11370}
11371
11372/* Return an rtx for the size in bytes of the value of EXP. */
11373
11374rtx
11375expr_size (tree exp)
11376{
11377 tree size;
11378
11379 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11380 size = TREE_OPERAND (exp, 1);
11381 else
11382 {
11383 size = tree_expr_size (exp);
11384 gcc_assert (size);
11385 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11386 }
11387
11388 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11389}
11390
11391/* Return a wide integer for the size in bytes of the value of EXP, or -1
 11392   if the size can vary or does not fit in a signed HOST_WIDE_INT.  */
11393
11394static HOST_WIDE_INT
11395int_expr_size (tree exp)
11396{
11397 tree size;
11398
11399 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11400 size = TREE_OPERAND (exp, 1);
11401 else
11402 {
11403 size = tree_expr_size (exp);
11404 gcc_assert (size);
11405 }
11406
11407 if (size == 0 || !tree_fits_shwi_p (size))
11408 return -1;
11409
11410 return tree_to_shwi (size);
11411}
11412
1f3233d1 11413#include "gt-expr.h"