10f307d9 1/* Convert tree expression to rtl instructions, for GNU compiler.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
10f307d9 3
f12b58b3 4This file is part of GCC.
10f307d9 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
10f307d9 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
10f307d9 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
10f307d9 19
10f307d9 20#include "config.h"
405711de 21#include "system.h"
805e22b2 22#include "coretypes.h"
9ef16211 23#include "backend.h"
24#include "tree.h"
25#include "gimple.h"
10f307d9 26#include "rtl.h"
9ef16211 27#include "df.h"
28#include "ssa.h"
b20a8bb4 29#include "alias.h"
b20a8bb4 30#include "fold-const.h"
9ed99284 31#include "stor-layout.h"
32#include "attribs.h"
33#include "varasm.h"
10f307d9 34#include "flags.h"
09994a52 35#include "regs.h"
037a5228 36#include "except.h"
10f307d9 37#include "insn-config.h"
3084721c 38#include "insn-attr.h"
d53441c8 39#include "expmed.h"
40#include "dojump.h"
41#include "explow.h"
42#include "calls.h"
43#include "emit-rtl.h"
44#include "stmt.h"
fa56dc1d 45/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
cd03a192 46#include "expr.h"
34517c64 47#include "insn-codes.h"
d8fc4d0b 48#include "optabs.h"
49#include "libfuncs.h"
10f307d9 50#include "recog.h"
6702c250 51#include "reload.h"
10f307d9 52#include "typeclass.h"
12874aaf 53#include "toplev.h"
b3187c7c 54#include "langhooks.h"
a3c49299 55#include "intl.h"
075136a2 56#include "tm_p.h"
4ee9c684 57#include "tree-iterator.h"
bc61cadb 58#include "internal-fn.h"
073c1fd5 59#include "cgraph.h"
2c8ff1ed 60#include "target.h"
218e3e4e 61#include "common/common-target.h"
5290ebdb 62#include "timevar.h"
cb7f680b 63#include "diagnostic.h"
b23fb4cb 64#include "tree-ssa-live.h"
f7373a91 65#include "tree-outof-ssa.h"
db5ca0ab 66#include "target-globals.h"
4bb60ec7 67#include "params.h"
424a4a92 68#include "tree-ssa-address.h"
e797f49f 69#include "cfgexpand.h"
f7715905 70#include "builtins.h"
058a1b7a 71#include "tree-chkp.h"
72#include "rtl-chkp.h"
9336ad57 73#include "ccmp.h"
10f307d9 74
c0bfc78e 75
10f307d9 76/* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82int cse_not_expected;
83
3ebd94bd 84/* This structure is used by move_by_pieces to describe the move to
85 be performed. */
584511c1 86struct move_by_pieces_d
3ebd94bd 87{
88 rtx to;
89 rtx to_addr;
90 int autinc_to;
91 int explicit_inc_to;
92 rtx from;
93 rtx from_addr;
94 int autinc_from;
95 int explicit_inc_from;
f7c44134 96 unsigned HOST_WIDE_INT len;
97 HOST_WIDE_INT offset;
3ebd94bd 98 int reverse;
99};
100
6840589f 101/* This structure is used by store_by_pieces to describe the store to
dbd14dc5 102 be performed. */
103
584511c1 104struct store_by_pieces_d
dbd14dc5 105{
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
f7c44134 110 unsigned HOST_WIDE_INT len;
111 HOST_WIDE_INT offset;
3754d046 112 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
b9a7cc69 113 void *constfundata;
dbd14dc5 114 int reverse;
115};
116
3d953cb1 117static void move_by_pieces_1 (insn_gen_fn, machine_mode,
584511c1 118 struct move_by_pieces_d *);
35cb5232 119static bool block_move_libcall_safe_for_call_parm (void);
36d63243 120static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
9db0f34d 121 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
122 unsigned HOST_WIDE_INT);
35cb5232 123static tree emit_block_move_libcall_fn (int);
124static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
3754d046 125static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
35cb5232 126static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
584511c1 127static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
3d953cb1 128static void store_by_pieces_2 (insn_gen_fn, machine_mode,
584511c1 129 struct store_by_pieces_d *);
35cb5232 130static tree clear_storage_libcall_fn (int);
c81fd430 131static rtx_insn *compress_float_constant (rtx, rtx);
35cb5232 132static rtx get_subtarget (rtx);
35cb5232 133static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
3754d046 134 HOST_WIDE_INT, machine_mode,
f955ca51 135 tree, int, alias_set_type);
35cb5232 136static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
4bb60ec7 137static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
138 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
3754d046 139 machine_mode, tree, alias_set_type, bool);
35cb5232 140
b7bf20db 141static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
35cb5232 142
1f1872fd 143static int is_aligning_offset (const_tree, const_tree);
4f7f7efd 144static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
3754d046 145static rtx do_store_flag (sepops, rtx, machine_mode);
fad4a30c 146#ifdef PUSH_ROUNDING
3754d046 147static void emit_single_push_insn (machine_mode, rtx, tree);
fad4a30c 148#endif
3754d046 149static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
35cb5232 150static rtx const_vector_from_tree (tree);
d53441c8 151static tree tree_expr_size (const_tree);
152static HOST_WIDE_INT int_expr_size (tree);
10f307d9 153
10f307d9 154\f
6d8b68a3 155/* This is run to set up which modes can be used
156 directly in memory and to initialize the block move optab. It is run
157 at the beginning of compilation and when the target is reinitialized. */
07edfa02 158
159void
6d8b68a3 160init_expr_target (void)
07edfa02 161{
162 rtx insn, pat;
3754d046 163 machine_mode mode;
6fa98783 164 int num_clobbers;
9e042f31 165 rtx mem, mem1;
0c7f5242 166 rtx reg;
9e042f31 167
a97fcedd 168 /* Try indexing by frame ptr and try by stack ptr.
169 It is known that on the Convex the stack ptr isn't a valid index.
170 With luck, one or the other is valid on any machine. */
06a78ffe 171 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
172 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
07edfa02 173
0c7f5242 174 /* A scratch register we can modify in-place below to avoid
175 useless RTL allocations. */
dcd6d0f4 176 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
0c7f5242 177
7a5749cc 178 insn = rtx_alloc (INSN);
d1f9b275 179 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
7a5749cc 180 PATTERN (insn) = pat;
07edfa02 181
182 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
3754d046 183 mode = (machine_mode) ((int) mode + 1))
07edfa02 184 {
185 int regno;
07edfa02 186
187 direct_load[(int) mode] = direct_store[(int) mode] = 0;
188 PUT_MODE (mem, mode);
a97fcedd 189 PUT_MODE (mem1, mode);
07edfa02 190
3c209fda 191 /* See if there is some register that can be used in this mode and
192 directly loaded or stored from memory. */
193
b63679d2 194 if (mode != VOIDmode && mode != BLKmode)
195 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
196 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
197 regno++)
198 {
199 if (! HARD_REGNO_MODE_OK (regno, mode))
200 continue;
3c209fda 201
937ca48e 202 set_mode_and_regno (reg, mode, regno);
3c209fda 203
b63679d2 204 SET_SRC (pat) = mem;
205 SET_DEST (pat) = reg;
206 if (recog (pat, insn, &num_clobbers) >= 0)
207 direct_load[(int) mode] = 1;
3c209fda 208
a97fcedd 209 SET_SRC (pat) = mem1;
210 SET_DEST (pat) = reg;
211 if (recog (pat, insn, &num_clobbers) >= 0)
212 direct_load[(int) mode] = 1;
213
b63679d2 214 SET_SRC (pat) = reg;
215 SET_DEST (pat) = mem;
216 if (recog (pat, insn, &num_clobbers) >= 0)
217 direct_store[(int) mode] = 1;
a97fcedd 218
219 SET_SRC (pat) = reg;
220 SET_DEST (pat) = mem1;
221 if (recog (pat, insn, &num_clobbers) >= 0)
222 direct_store[(int) mode] = 1;
b63679d2 223 }
07edfa02 224 }
225
dcd6d0f4 226 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
c0c4a46d 227
228 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
229 mode = GET_MODE_WIDER_MODE (mode))
230 {
3754d046 231 machine_mode srcmode;
c0c4a46d 232 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
ff385626 233 srcmode = GET_MODE_WIDER_MODE (srcmode))
c0c4a46d 234 {
235 enum insn_code ic;
236
237 ic = can_extend_p (mode, srcmode, 0);
238 if (ic == CODE_FOR_nothing)
239 continue;
240
241 PUT_MODE (mem, srcmode);
ff385626 242
39c56a89 243 if (insn_operand_matches (ic, 1, mem))
c0c4a46d 244 float_extend_from_mem[mode][srcmode] = true;
245 }
246 }
07edfa02 247}
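
/* A minimal sketch (the helper name is hypothetical) of how the tables
   filled in above are meant to be consulted: a mode supports direct
   loads only if the loop recognized some
   (set (reg:MODE r) (mem:MODE ...)) insn for it.  */
#if 0
static bool
example_mode_loads_directly (machine_mode mode)
{
  return direct_load[(int) mode] != 0;
}
#endif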
6fa98783 248
10f307d9 249/* This is run at the start of compiling a function. */
250
251void
35cb5232 252init_expr (void)
10f307d9 253{
fd6ffb7c 254 memset (&crtl->expr, 0, sizeof (crtl->expr));
10f307d9 255}
10f307d9 256\f
257/* Copy data from FROM to TO, where the machine modes are not the same.
68a556d6 258 Both modes may be integer, or both may be floating, or both may be
259 fixed-point.
10f307d9 260 UNSIGNEDP should be nonzero if FROM is an unsigned type.
261 This causes zero-extension instead of sign-extension. */
262
263void
35cb5232 264convert_move (rtx to, rtx from, int unsignedp)
10f307d9 265{
3754d046 266 machine_mode to_mode = GET_MODE (to);
267 machine_mode from_mode = GET_MODE (from);
cee7491d 268 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
269 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
10f307d9 270 enum insn_code code;
271 rtx libcall;
272
273 /* rtx code for making an equivalent value. */
65923445 274 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
275 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
10f307d9 276
10f307d9 277
611234b4 278 gcc_assert (to_real == from_real);
1f8b6002 279 gcc_assert (to_mode != BLKmode);
280 gcc_assert (from_mode != BLKmode);
10f307d9 281
4ee9c684 282 /* If the source and destination are already the same, then there's
283 nothing to do. */
284 if (to == from)
285 return;
286
acfb31e5 287 /* If FROM is a SUBREG that indicates that we have already done at least
288 the required extension, strip it. We don't handle such SUBREGs as
289 TO here. */
290
291 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
995b44f5 292 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
293 >= GET_MODE_PRECISION (to_mode))
e8629f9e 294 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
acfb31e5 295 from = gen_lowpart (to_mode, from), from_mode = to_mode;
296
611234b4 297 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
acfb31e5 298
10f307d9 299 if (to_mode == from_mode
300 || (from_mode == VOIDmode && CONSTANT_P (from)))
301 {
302 emit_move_insn (to, from);
303 return;
304 }
305
8a95ab85 306 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
307 {
611234b4 308 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
fa56dc1d 309
8a95ab85 310 if (VECTOR_MODE_P (to_mode))
1c0d4c2c 311 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
8a95ab85 312 else
1c0d4c2c 313 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
8a95ab85 314
315 emit_move_insn (to, from);
316 return;
317 }
318
a9f93c81 319 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
320 {
321 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
322 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
323 return;
324 }
325
10f307d9 326 if (to_real)
327 {
1d277a67 328 rtx value;
329 rtx_insn *insns;
a7cc195f 330 convert_optab tab;
ece3ba9a 331
069b07bf 332 gcc_assert ((GET_MODE_PRECISION (from_mode)
333 != GET_MODE_PRECISION (to_mode))
334 || (DECIMAL_FLOAT_MODE_P (from_mode)
335 != DECIMAL_FLOAT_MODE_P (to_mode)));
1f8b6002 336
069b07bf 337 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
338 /* Conversion between decimal float and binary float, same size. */
339 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
340 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
a7cc195f 341 tab = sext_optab;
a7cc195f 342 else
611234b4 343 tab = trunc_optab;
dd8a4c60 344
a7cc195f 345 /* Try converting directly if the insn is supported. */
dd8a4c60 346
d6bf3b14 347 code = convert_optab_handler (tab, to_mode, from_mode);
a7cc195f 348 if (code != CODE_FOR_nothing)
c2a91a88 349 {
a7cc195f 350 emit_unop_insn (code, to, from,
351 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
c2a91a88 352 return;
353 }
c2a91a88 354
a7cc195f 355 /* Otherwise use a libcall. */
f36b9f69 356 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
fa56dc1d 357
611234b4 358 /* Is this conversion implemented yet? */
359 gcc_assert (libcall);
10f307d9 360
542baf17 361 start_sequence ();
2c5d421b 362 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
ece3ba9a 363 1, from, from_mode);
542baf17 364 insns = get_insns ();
365 end_sequence ();
1d5ca076 366 emit_libcall_block (insns, to, value,
367 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
368 from)
369 : gen_rtx_FLOAT_EXTEND (to_mode, from));
10f307d9 370 return;
371 }
372
a7cc195f 373 /* Handle pointer conversion. */ /* SPEE 900220. */
91ae0791 374 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
375 {
376 convert_optab ctab;
377
378 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
379 ctab = trunc_optab;
380 else if (unsignedp)
381 ctab = zext_optab;
382 else
383 ctab = sext_optab;
384
385 if (convert_optab_handler (ctab, to_mode, from_mode)
386 != CODE_FOR_nothing)
387 {
388 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
389 to, from, UNKNOWN);
390 return;
391 }
392 }
393
a7cc195f 394 /* Targets are expected to provide conversion insns between PxImode and
395 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
396 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
397 {
3754d046 398 machine_mode full_mode
a7cc195f 399 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
400
d6bf3b14 401 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
611234b4 402 != CODE_FOR_nothing);
a7cc195f 403
404 if (full_mode != from_mode)
405 from = convert_to_mode (full_mode, from, unsignedp);
d6bf3b14 406 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
a7cc195f 407 to, from, UNKNOWN);
408 return;
409 }
410 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
411 {
06633980 412 rtx new_from;
3754d046 413 machine_mode full_mode
a7cc195f 414 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
c8076084 415 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
416 enum insn_code icode;
a7cc195f 417
c8076084 418 icode = convert_optab_handler (ctab, full_mode, from_mode);
419 gcc_assert (icode != CODE_FOR_nothing);
a7cc195f 420
a7cc195f 421 if (to_mode == full_mode)
06633980 422 {
c8076084 423 emit_unop_insn (icode, to, from, UNKNOWN);
06633980 424 return;
425 }
426
427 new_from = gen_reg_rtx (full_mode);
c8076084 428 emit_unop_insn (icode, new_from, from, UNKNOWN);
a7cc195f 429
aab2cf92 430 /* else proceed to integer conversions below. */
a7cc195f 431 from_mode = full_mode;
06633980 432 from = new_from;
a7cc195f 433 }
434
68a556d6 435 /* Make sure both are fixed-point modes or both are not. */
436 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
437 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
438 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
439 {
440 /* If we widen from_mode to to_mode and they are in the same class,
441 we won't saturate the result.
442 Otherwise, always saturate the result to play safe. */
443 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
444 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
445 expand_fixed_convert (to, from, 0, 0);
446 else
447 expand_fixed_convert (to, from, 0, 1);
448 return;
449 }
450
10f307d9 451 /* Now both modes are integers. */
452
453 /* Handle expanding beyond a word. */
995b44f5 454 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
455 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
10f307d9 456 {
1d277a67 457 rtx_insn *insns;
10f307d9 458 rtx lowpart;
459 rtx fill_value;
460 rtx lowfrom;
461 int i;
3754d046 462 machine_mode lowpart_mode;
10f307d9 463 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
464
465 /* Try converting directly if the insn is supported. */
466 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
467 != CODE_FOR_nothing)
468 {
6a0b5011 469 /* If FROM is a SUBREG, put it into a register. Do this
470 so that we always generate the same set of insns for
471 better cse'ing; if an intermediate assignment occurred,
472 we won't be doing the operation directly on the SUBREG. */
473 if (optimize > 0 && GET_CODE (from) == SUBREG)
474 from = force_reg (from_mode, from);
10f307d9 475 emit_unop_insn (code, to, from, equiv_code);
476 return;
477 }
478 /* Next, try converting via full word. */
995b44f5 479 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
10f307d9 480 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
481 != CODE_FOR_nothing))
482 {
8ae2fa3b 483 rtx word_to = gen_reg_rtx (word_mode);
8ad4c111 484 if (REG_P (to))
d6af6bc2 485 {
486 if (reg_overlap_mentioned_p (to, from))
487 from = force_reg (from_mode, from);
18b42941 488 emit_clobber (to);
d6af6bc2 489 }
8ae2fa3b 490 convert_move (word_to, from, unsignedp);
491 emit_unop_insn (code, to, word_to, equiv_code);
10f307d9 492 return;
493 }
494
495 /* No special multiword conversion insn; do it by hand. */
496 start_sequence ();
497
c43fbd61 498 /* Since we will turn this into a no conflict block, we must ensure
499 that the source does not overlap the target, so force it into an
500 isolated register when it might. Likewise for any MEM input, since the
501 conversion sequence might require several references to it and we
502 must ensure we're getting the same value every time. */
ab72e117 503
c43fbd61 504 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
ab72e117 505 from = force_reg (from_mode, from);
506
10f307d9 507 /* Get a copy of FROM widened to a word, if necessary. */
995b44f5 508 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
10f307d9 509 lowpart_mode = word_mode;
510 else
511 lowpart_mode = from_mode;
512
513 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
514
515 lowpart = gen_lowpart (lowpart_mode, to);
516 emit_move_insn (lowpart, lowfrom);
517
518 /* Compute the value to put in each remaining word. */
519 if (unsignedp)
520 fill_value = const0_rtx;
521 else
155586ea 522 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
523 LT, lowfrom, const0_rtx,
524 lowpart_mode, 0, -1);
10f307d9 525
526 /* Fill the remaining words. */
527 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
528 {
529 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
530 rtx subword = operand_subword (to, index, 1, to_mode);
531
611234b4 532 gcc_assert (subword);
10f307d9 533
534 if (fill_value != subword)
535 emit_move_insn (subword, fill_value);
536 }
537
538 insns = get_insns ();
539 end_sequence ();
540
e29831db 541 emit_insn (insns);
10f307d9 542 return;
543 }
544
5602c36d 545 /* Truncating multi-word to a word or less. */
995b44f5 546 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
547 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
10f307d9 548 {
e16ceb8e 549 if (!((MEM_P (from)
d5601bb1 550 && ! MEM_VOLATILE_P (from)
551 && direct_load[(int) to_mode]
4e27ffd0 552 && ! mode_dependent_address_p (XEXP (from, 0),
553 MEM_ADDR_SPACE (from)))
8ad4c111 554 || REG_P (from)
d5601bb1 555 || GET_CODE (from) == SUBREG))
556 from = force_reg (from_mode, from);
10f307d9 557 convert_move (to, gen_lowpart (word_mode, from), 0);
558 return;
559 }
560
10f307d9 561 /* Now follow all the conversions between integers
562 no more than a word long. */
563
564 /* For truncation, usually we can just refer to FROM in a narrower mode. */
565 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
396f2130 566 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
10f307d9 567 {
e16ceb8e 568 if (!((MEM_P (from)
5602c36d 569 && ! MEM_VOLATILE_P (from)
570 && direct_load[(int) to_mode]
4e27ffd0 571 && ! mode_dependent_address_p (XEXP (from, 0),
572 MEM_ADDR_SPACE (from)))
8ad4c111 573 || REG_P (from)
5602c36d 574 || GET_CODE (from) == SUBREG))
575 from = force_reg (from_mode, from);
8ad4c111 576 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
7de79a05 577 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
578 from = copy_to_reg (from);
10f307d9 579 emit_move_insn (to, gen_lowpart (to_mode, from));
580 return;
581 }
582
5602c36d 583 /* Handle extension. */
995b44f5 584 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
10f307d9 585 {
586 /* Convert directly if that works. */
587 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
588 != CODE_FOR_nothing)
589 {
590 emit_unop_insn (code, to, from, equiv_code);
591 return;
592 }
593 else
594 {
3754d046 595 machine_mode intermediate;
851e6849 596 rtx tmp;
f5ff0b21 597 int shift_amount;
10f307d9 598
599 /* Search for a mode to convert via. */
600 for (intermediate = from_mode; intermediate != VOIDmode;
601 intermediate = GET_MODE_WIDER_MODE (intermediate))
0f22a35c 602 if (((can_extend_p (to_mode, intermediate, unsignedp)
603 != CODE_FOR_nothing)
604 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
396f2130 605 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
10f307d9 606 && (can_extend_p (intermediate, from_mode, unsignedp)
607 != CODE_FOR_nothing))
608 {
609 convert_move (to, convert_to_mode (intermediate, from,
610 unsignedp), unsignedp);
611 return;
612 }
613
851e6849 614 /* No suitable intermediate mode.
fa56dc1d 615 Generate what we need with shifts. */
995b44f5 616 shift_amount = (GET_MODE_PRECISION (to_mode)
617 - GET_MODE_PRECISION (from_mode));
851e6849 618 from = gen_lowpart (to_mode, force_reg (from_mode, from));
619 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
620 to, unsignedp);
fa56dc1d 621 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
851e6849 622 to, unsignedp);
623 if (tmp != to)
624 emit_move_insn (to, tmp);
625 return;
10f307d9 626 }
627 }
628
fa56dc1d 629 /* Support special truncate insns for certain modes. */
d6bf3b14 630 if (convert_optab_handler (trunc_optab, to_mode,
631 from_mode) != CODE_FOR_nothing)
10f307d9 632 {
d6bf3b14 633 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
a7cc195f 634 to, from, UNKNOWN);
cd0fdd24 635 return;
636 }
637
10f307d9 638 /* Handle truncation of volatile memrefs, and so on;
639 the things that couldn't be truncated directly,
a7cc195f 640 and for which there was no special instruction.
641
642 ??? Code above formerly short-circuited this, for most integer
643 mode pairs, with a force_reg in from_mode followed by a recursive
644 call to this routine. Appears always to have been wrong. */
995b44f5 645 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
10f307d9 646 {
647 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
648 emit_move_insn (to, temp);
649 return;
650 }
651
652 /* Mode combination is not recognized. */
611234b4 653 gcc_unreachable ();
10f307d9 654}
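
/* A minimal usage sketch (hypothetical caller, not part of the original
   source): widen a SImode pseudo into a fresh DImode pseudo.  UNSIGNEDP
   selects ZERO_EXTEND (nonzero) or SIGN_EXTEND (zero), as above.  */
#if 0
static rtx
example_widen_si_to_di (rtx src_si, int unsignedp)
{
  rtx dst_di = gen_reg_rtx (DImode);
  convert_move (dst_di, src_si, unsignedp);
  return dst_di;
}
#endif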
655
656/* Return an rtx for a value that would result
657 from converting X to mode MODE.
658 Both X and MODE may be floating, or both integer.
659 UNSIGNEDP is nonzero if X is an unsigned value.
660 This can be done by referring to a part of X in place
0a534ba7 661 or by copying to a new temporary with conversion. */
10f307d9 662
663rtx
3754d046 664convert_to_mode (machine_mode mode, rtx x, int unsignedp)
a63e1c46 665{
666 return convert_modes (mode, VOIDmode, x, unsignedp);
667}
668
669/* Return an rtx for a value that would result
670 from converting X from mode OLDMODE to mode MODE.
671 Both modes may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
673
674 This can be done by referring to a part of X in place
675 or by copying to a new temporary with conversion.
676
0a534ba7 677 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
a63e1c46 678
679rtx
3754d046 680convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
10f307d9 681{
19cb6b50 682 rtx temp;
a63e1c46 683
acfb31e5 684 /* If FROM is a SUBREG that indicates that we have already done at least
685 the required extension, strip it. */
686
687 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
688 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
e8629f9e 689 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
9f484185 690 x = gen_lowpart (mode, SUBREG_REG (x));
10f307d9 691
e4d9bbd7 692 if (GET_MODE (x) != VOIDmode)
693 oldmode = GET_MODE (x);
fa56dc1d 694
79a80d86 695 if (mode == oldmode)
696 return x;
697
c4050ce7 698 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
e92ac421 699 {
c4050ce7 700 /* If the caller did not tell us the old mode, then there is not
701 much to do with respect to canonicalization. We have to
702 assume that all the bits are significant. */
2508ba32 703 if (GET_MODE_CLASS (oldmode) != MODE_INT)
720f3e8a 704 oldmode = MAX_MODE_INT;
705 wide_int w = wide_int::from (std::make_pair (x, oldmode),
706 GET_MODE_PRECISION (mode),
707 unsignedp ? UNSIGNED : SIGNED);
a23d6610 708 return immed_wide_int_const (w, mode);
e92ac421 709 }
10f307d9 710
711 /* We can do this with a gen_lowpart if both desired and current modes
712 are integer, and this is either a constant integer, a register, or a
a23d6610 713 non-volatile MEM. */
714 if (GET_MODE_CLASS (mode) == MODE_INT
715 && GET_MODE_CLASS (oldmode) == MODE_INT
716 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
717 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
718 || (REG_P (x)
719 && (!HARD_REGISTER_P (x)
720 || HARD_REGNO_MODE_OK (REGNO (x), mode))
721 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
722
723 return gen_lowpart (mode, x);
10f307d9 724
77d25dbd 725 /* Converting an integer constant into MODE is always equivalent to a
726 subreg operation. */
727 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
728 {
611234b4 729 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
77d25dbd 730 return simplify_gen_subreg (mode, x, oldmode, 0);
731 }
732
10f307d9 733 temp = gen_reg_rtx (mode);
734 convert_move (temp, x, unsignedp);
735 return temp;
736}
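
/* A sketch of the constant-canonicalization path above (illustrative
   values): a CONST_INT carries no mode, so the caller supplies OLDMODE
   and the result is reduced to the precision of MODE.  */
#if 0
static rtx
example_narrow_constant (void)
{
  /* 0x1234 reduced to QImode precision yields (const_int 0x34).  */
  return convert_modes (QImode, SImode, GEN_INT (0x1234), 1);
}
#endif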
737\f
c7e41aee 738/* Return the largest alignment we can use for doing a move (or store)
739 of MAX_PIECES. ALIGN is the largest alignment we could use. */
740
741static unsigned int
742alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
743{
3754d046 744 machine_mode tmode;
c7e41aee 745
746 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
747 if (align >= GET_MODE_ALIGNMENT (tmode))
748 align = GET_MODE_ALIGNMENT (tmode);
749 else
750 {
3754d046 751 machine_mode tmode, xmode;
c7e41aee 752
753 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
754 tmode != VOIDmode;
755 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
756 if (GET_MODE_SIZE (tmode) > max_pieces
757 || SLOW_UNALIGNED_ACCESS (tmode, align))
758 break;
759
760 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
761 }
762
763 return align;
764}
765
766/* Return the widest integer mode strictly narrower than SIZE (callers
767 pass their limit plus one). If no such mode exists, return VOIDmode. */
768
3754d046 769static machine_mode
c7e41aee 770widest_int_mode_for_size (unsigned int size)
771{
3754d046 772 machine_mode tmode, mode = VOIDmode;
c7e41aee 773
774 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
775 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
776 if (GET_MODE_SIZE (tmode) < size)
777 mode = tmode;
778
779 return mode;
780}
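
/* A worked example, assuming the usual QI/HI/SI/DI integer modes.  */
#if 0
static void
example_widest_int_mode (void)
{
  machine_mode m = widest_int_mode_for_size (5); /* SImode: 4 < 5.  */
  machine_mode v = widest_int_mode_for_size (1); /* VOIDmode: nothing fits.  */
}
#endif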
781
9fe0e1b8 782/* Determine whether the LEN bytes can be moved by using several move
783 instructions. Return nonzero if a call to move_by_pieces should
784 succeed. */
785
786int
d4bd0e64 787can_move_by_pieces (unsigned HOST_WIDE_INT len,
788 unsigned int align)
9fe0e1b8 789{
d4bd0e64 790 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
791 optimize_insn_for_speed_p ());
9fe0e1b8 792}
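
/* A minimal sketch mirroring the caller in emit_block_move_hints below:
   only constant lengths qualify for the by-pieces strategy.  */
#if 0
static void
example_copy_if_cheap (rtx x, rtx y, rtx size, unsigned int align)
{
  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
}
#endif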
793
fad4a30c 794/* Generate several move instructions to copy LEN bytes from block FROM to
0a534ba7 795 block TO. (These are MEM rtx's with BLKmode).
ef7dc4b4 796
fad4a30c 797 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
798 used to push FROM to the stack.
ef7dc4b4 799
9fe0e1b8 800 ALIGN is the maximum alignment we can assume.
10f307d9 801
9fe0e1b8 802 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
803 ala mempcpy; and if ENDP is 2, return the memory at the end minus one
804 byte, ala stpcpy. */
805
806rtx
35cb5232 807move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
808 unsigned int align, int endp)
10f307d9 809{
584511c1 810 struct move_by_pieces_d data;
3754d046 811 machine_mode to_addr_mode;
812 machine_mode from_addr_mode = get_address_mode (from);
ef7dc4b4 813 rtx to_addr, from_addr = XEXP (from, 0);
02e7a332 814 unsigned int max_size = MOVE_MAX_PIECES + 1;
53bd09ab 815 enum insn_code icode;
10f307d9 816
b4ad0ea6 817 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
818
10f307d9 819 data.offset = 0;
10f307d9 820 data.from_addr = from_addr;
ef7dc4b4 821 if (to)
822 {
87cf5753 823 to_addr_mode = get_address_mode (to);
ef7dc4b4 824 to_addr = XEXP (to, 0);
825 data.to = to;
826 data.autinc_to
827 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
828 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
829 data.reverse
830 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
831 }
832 else
833 {
98155838 834 to_addr_mode = VOIDmode;
ef7dc4b4 835 to_addr = NULL_RTX;
836 data.to = NULL_RTX;
837 data.autinc_to = 1;
3764c94e 838 if (STACK_GROWS_DOWNWARD)
839 data.reverse = 1;
840 else
841 data.reverse = 0;
ef7dc4b4 842 }
843 data.to_addr = to_addr;
10f307d9 844 data.from = from;
10f307d9 845 data.autinc_from
846 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
847 || GET_CODE (from_addr) == POST_INC
848 || GET_CODE (from_addr) == POST_DEC);
849
850 data.explicit_inc_from = 0;
851 data.explicit_inc_to = 0;
10f307d9 852 if (data.reverse) data.offset = len;
853 data.len = len;
854
855 /* If copying requires more than two move insns,
856 copy addresses to registers (to make displacements shorter)
857 and use post-increment if available. */
858 if (!(data.autinc_from && data.autinc_to)
025d4f81 859 && move_by_pieces_ninsns (len, align, max_size) > 2)
10f307d9 860 {
c7e41aee 861 /* Find the mode of the largest move...
862 MODE might not be used depending on the definitions of the
863 USE_* macros below. */
3754d046 864 machine_mode mode ATTRIBUTE_UNUSED
c7e41aee 865 = widest_int_mode_for_size (max_size);
53bd09ab 866
867 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
10f307d9 868 {
98155838 869 data.from_addr = copy_to_mode_reg (from_addr_mode,
29c05e22 870 plus_constant (from_addr_mode,
871 from_addr, len));
10f307d9 872 data.autinc_from = 1;
873 data.explicit_inc_from = -1;
874 }
53bd09ab 875 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
10f307d9 876 {
98155838 877 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
10f307d9 878 data.autinc_from = 1;
879 data.explicit_inc_from = 1;
880 }
10f307d9 881 if (!data.autinc_from && CONSTANT_P (from_addr))
98155838 882 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
53bd09ab 883 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
10f307d9 884 {
98155838 885 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 886 plus_constant (to_addr_mode,
887 to_addr, len));
10f307d9 888 data.autinc_to = 1;
889 data.explicit_inc_to = -1;
890 }
53bd09ab 891 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
10f307d9 892 {
98155838 893 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
10f307d9 894 data.autinc_to = 1;
895 data.explicit_inc_to = 1;
896 }
10f307d9 897 if (!data.autinc_to && CONSTANT_P (to_addr))
98155838 898 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
10f307d9 899 }
900
c7e41aee 901 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
10f307d9 902
903 /* First move what we can in the largest integer mode, then go to
904 successively smaller modes. */
905
01dd0067 906 while (max_size > 1 && data.len > 0)
10f307d9 907 {
3754d046 908 machine_mode mode = widest_int_mode_for_size (max_size);
10f307d9 909
910 if (mode == VOIDmode)
911 break;
912
d6bf3b14 913 icode = optab_handler (mov_optab, mode);
325d1c45 914 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
10f307d9 915 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
916
917 max_size = GET_MODE_SIZE (mode);
918 }
919
920 /* The code above should have handled everything. */
611234b4 921 gcc_assert (!data.len);
9fe0e1b8 922
923 if (endp)
924 {
925 rtx to1;
926
611234b4 927 gcc_assert (!data.reverse);
9fe0e1b8 928 if (data.autinc_to)
929 {
930 if (endp == 2)
931 {
932 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
933 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
934 else
98155838 935 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 936 plus_constant (to_addr_mode,
937 data.to_addr,
9fe0e1b8 938 -1));
939 }
940 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
941 data.offset);
942 }
943 else
944 {
945 if (endp == 2)
946 --data.offset;
947 to1 = adjust_address (data.to, QImode, data.offset);
948 }
949 return to1;
950 }
951 else
952 return data.to;
10f307d9 953}
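
/* A sketch of the ENDP convention (hypothetical caller): ENDP == 1
   requests the address one past the last byte written, ala mempcpy;
   with PUSH_ROUNDING, a null TO pushes FROM onto the stack instead.  */
#if 0
static rtx
example_mempcpy_style (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
                       unsigned int align)
{
  return move_by_pieces (dst, src, len, align, 1);
}
#endif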
954
955/* Return the number of insns required to move L bytes by pieces.
decd7a45 956 ALIGN (in bits) is the maximum alignment we can assume. */
10f307d9 957
8136e769 958unsigned HOST_WIDE_INT
025d4f81 959move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
960 unsigned int max_size)
10f307d9 961{
f7c44134 962 unsigned HOST_WIDE_INT n_insns = 0;
10f307d9 963
c7e41aee 964 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
10f307d9 965
01dd0067 966 while (max_size > 1 && l > 0)
10f307d9 967 {
3754d046 968 machine_mode mode;
10f307d9 969 enum insn_code icode;
970
c7e41aee 971 mode = widest_int_mode_for_size (max_size);
10f307d9 972
973 if (mode == VOIDmode)
974 break;
975
d6bf3b14 976 icode = optab_handler (mov_optab, mode);
325d1c45 977 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
10f307d9 978 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
979
980 max_size = GET_MODE_SIZE (mode);
981 }
982
611234b4 983 gcc_assert (!l);
10f307d9 984 return n_insns;
985}
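
/* A worked count, assuming a target where MOVE_MAX_PIECES is 4 and the
   operands are 32-bit aligned: 13 bytes decompose into three SImode
   moves (12 bytes) plus one QImode move, so the call returns 4.  */
#if 0
static void
example_ninsns (void)
{
  gcc_assert (move_by_pieces_ninsns (13, 32, 4 + 1) == 4);
}
#endif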
986
987/* Subroutine of move_by_pieces. Move as many bytes as appropriate
988 with move instructions for mode MODE. GENFUN is the gen_... function
989 to make a move insn for that mode. DATA has all the other info. */
990
991static void
3d953cb1 992move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
584511c1 993 struct move_by_pieces_d *data)
10f307d9 994{
f7c44134 995 unsigned int size = GET_MODE_SIZE (mode);
97b330ca 996 rtx to1 = NULL_RTX, from1;
10f307d9 997
998 while (data->len >= size)
999 {
f7c44134 1000 if (data->reverse)
1001 data->offset -= size;
1002
ef7dc4b4 1003 if (data->to)
f7c44134 1004 {
ef7dc4b4 1005 if (data->autinc_to)
bf42c62d 1006 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1007 data->offset);
ef7dc4b4 1008 else
e513d163 1009 to1 = adjust_address (data->to, mode, data->offset);
f7c44134 1010 }
f7c44134 1011
1012 if (data->autinc_from)
bf42c62d 1013 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1014 data->offset);
f7c44134 1015 else
e513d163 1016 from1 = adjust_address (data->from, mode, data->offset);
10f307d9 1017
e4e498cf 1018 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
e1855348 1019 emit_insn (gen_add2_insn (data->to_addr,
d11aedc7 1020 gen_int_mode (-(HOST_WIDE_INT) size,
1021 GET_MODE (data->to_addr))));
e4e498cf 1022 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
e1855348 1023 emit_insn (gen_add2_insn (data->from_addr,
d11aedc7 1024 gen_int_mode (-(HOST_WIDE_INT) size,
1025 GET_MODE (data->from_addr))));
10f307d9 1026
ef7dc4b4 1027 if (data->to)
1028 emit_insn ((*genfun) (to1, from1));
1029 else
fad4a30c 1030 {
1031#ifdef PUSH_ROUNDING
1032 emit_single_push_insn (mode, from1, NULL);
1033#else
611234b4 1034 gcc_unreachable ();
fad4a30c 1035#endif
1036 }
f7c44134 1037
e4e498cf 1038 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
d11aedc7 1039 emit_insn (gen_add2_insn (data->to_addr,
1040 gen_int_mode (size,
1041 GET_MODE (data->to_addr))));
e4e498cf 1042 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
d11aedc7 1043 emit_insn (gen_add2_insn (data->from_addr,
1044 gen_int_mode (size,
1045 GET_MODE (data->from_addr))));
10f307d9 1046
f7c44134 1047 if (! data->reverse)
1048 data->offset += size;
10f307d9 1049
1050 data->len -= size;
1051 }
1052}
1053\f
c0bfc78e 1054/* Emit code to move a block Y to a block X. This may be done with
1055 string-move instructions, with multiple scalar move instructions,
1056 or with a library call.
10f307d9 1057
c0bfc78e 1058 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
10f307d9 1059 SIZE is an rtx that says how long they are.
325d1c45 1060 ALIGN is the maximum alignment we can assume they have.
0378dbdc 1061 METHOD describes what kind of copy this is, and what mechanisms may be used.
36d63243 1062 MIN_SIZE is the minimal size of the block to move.
1063 MAX_SIZE is the maximal size of the block to move; if it cannot be
1064 represented in unsigned HOST_WIDE_INT, it is the mask of all ones.
10f307d9 1065
0dbd1c74 1066 Return the address of the new block, if memcpy is called and returns it,
1067 0 otherwise. */
1068
1069rtx
162719b3 1070emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
36d63243 1071 unsigned int expected_align, HOST_WIDE_INT expected_size,
1072 unsigned HOST_WIDE_INT min_size,
9db0f34d 1073 unsigned HOST_WIDE_INT max_size,
1074 unsigned HOST_WIDE_INT probable_max_size)
10f307d9 1075{
0378dbdc 1076 bool may_use_call;
0dbd1c74 1077 rtx retval = 0;
0378dbdc 1078 unsigned int align;
1079
aeccaf28 1080 gcc_assert (size);
1081 if (CONST_INT_P (size)
1082 && INTVAL (size) == 0)
1083 return 0;
1084
0378dbdc 1085 switch (method)
1086 {
1087 case BLOCK_OP_NORMAL:
0b25db21 1088 case BLOCK_OP_TAILCALL:
0378dbdc 1089 may_use_call = true;
1090 break;
1091
1092 case BLOCK_OP_CALL_PARM:
1093 may_use_call = block_move_libcall_safe_for_call_parm ();
1094
1095 /* Make inhibit_defer_pop nonzero around the library call
1096 to force it to pop the arguments right away. */
1097 NO_DEFER_POP;
1098 break;
1099
1100 case BLOCK_OP_NO_LIBCALL:
1101 may_use_call = false;
1102 break;
1103
1104 default:
611234b4 1105 gcc_unreachable ();
0378dbdc 1106 }
1107
aeccaf28 1108 gcc_assert (MEM_P (x) && MEM_P (y));
0378dbdc 1109 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
0ea95334 1110 gcc_assert (align >= BITS_PER_UNIT);
0dbd1c74 1111
67c155cb 1112 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1113 block copy is more efficient for other large modes, e.g. DCmode. */
1114 x = adjust_address (x, BLKmode, 0);
1115 y = adjust_address (y, BLKmode, 0);
1116
e83ff88b 1117 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1118 can be incorrect is coming from __builtin_memcpy. */
971ba038 1119 if (CONST_INT_P (size))
e83ff88b 1120 {
1121 x = shallow_copy_rtx (x);
1122 y = shallow_copy_rtx (y);
5b2a69fa 1123 set_mem_size (x, INTVAL (size));
1124 set_mem_size (y, INTVAL (size));
e83ff88b 1125 }
1126
d4bd0e64 1127 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
9fe0e1b8 1128 move_by_pieces (x, y, INTVAL (size), align, 0);
162719b3 1129 else if (emit_block_move_via_movmem (x, y, size, align,
36d63243 1130 expected_align, expected_size,
9db0f34d 1131 min_size, max_size, probable_max_size))
c0bfc78e 1132 ;
bd1a81f7 1133 else if (may_use_call
1134 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1135 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
006e2d5a 1136 {
1137 /* Since x and y are passed to a libcall, mark the corresponding
1138 tree EXPR as addressable. */
1139 tree y_expr = MEM_EXPR (y);
1140 tree x_expr = MEM_EXPR (x);
1141 if (y_expr)
1142 mark_addressable (y_expr);
1143 if (x_expr)
1144 mark_addressable (x_expr);
1145 retval = emit_block_move_via_libcall (x, y, size,
1146 method == BLOCK_OP_TAILCALL);
1147 }
1148
0378dbdc 1149 else
1150 emit_block_move_via_loop (x, y, size, align);
1151
1152 if (method == BLOCK_OP_CALL_PARM)
1153 OK_DEFER_POP;
a5fd5157 1154
c0bfc78e 1155 return retval;
1156}
a5fd5157 1157
162719b3 1158rtx
1159emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1160{
36d63243 1161 unsigned HOST_WIDE_INT max, min = 0;
1162 if (GET_CODE (size) == CONST_INT)
1163 min = max = UINTVAL (size);
1164 else
1165 max = GET_MODE_MASK (GET_MODE (size));
1166 return emit_block_move_hints (x, y, size, method, 0, -1,
9db0f34d 1167 min, max, max);
162719b3 1168}
1169
35cb5232 1170/* A subroutine of emit_block_move. Returns true if calling the
0378dbdc 1171 block move libcall will not clobber any parameters which may have
1172 already been placed on the stack. */
1173
1174static bool
35cb5232 1175block_move_libcall_safe_for_call_parm (void)
0378dbdc 1176{
22c61100 1177#if defined (REG_PARM_STACK_SPACE)
1178 tree fn;
1179#endif
1180
a58c0619 1181 /* If arguments are pushed on the stack, then they're safe. */
0378dbdc 1182 if (PUSH_ARGS)
1183 return true;
0378dbdc 1184
1d5ca076 1185 /* If registers go on the stack anyway, any argument is sure to clobber
a58c0619 1186 an outgoing argument. */
63c68695 1187#if defined (REG_PARM_STACK_SPACE)
22c61100 1188 fn = emit_block_move_libcall_fn (false);
ac2fdd89 1189 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1190 depend on its argument. */
1191 (void) fn;
22c61100 1192 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1193 && REG_PARM_STACK_SPACE (fn) != 0)
1194 return false;
0378dbdc 1195#endif
0378dbdc 1196
a58c0619 1197 /* If any argument goes in memory, then it might clobber an outgoing
1198 argument. */
1199 {
39cba157 1200 CUMULATIVE_ARGS args_so_far_v;
1201 cumulative_args_t args_so_far;
a58c0619 1202 tree fn, arg;
1d5ca076 1203
a58c0619 1204 fn = emit_block_move_libcall_fn (false);
39cba157 1205 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1206 args_so_far = pack_cumulative_args (&args_so_far_v);
1d5ca076 1207
a58c0619 1208 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1209 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1210 {
3754d046 1211 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
39cba157 1212 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
f387af4f 1213 NULL_TREE, true);
a58c0619 1214 if (!tmp || !REG_P (tmp))
0378dbdc 1215 return false;
39cba157 1216 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
a58c0619 1217 return false;
39cba157 1218 targetm.calls.function_arg_advance (args_so_far, mode,
f387af4f 1219 NULL_TREE, true);
a58c0619 1220 }
1221 }
1222 return true;
0378dbdc 1223}
1224
008c057d 1225/* A subroutine of emit_block_move. Expand a movmem pattern;
c0bfc78e 1226 return true if successful. */
6702c250 1227
c0bfc78e 1228static bool
162719b3 1229emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
36d63243 1230 unsigned int expected_align, HOST_WIDE_INT expected_size,
1231 unsigned HOST_WIDE_INT min_size,
9db0f34d 1232 unsigned HOST_WIDE_INT max_size,
1233 unsigned HOST_WIDE_INT probable_max_size)
c0bfc78e 1234{
fbc6244b 1235 int save_volatile_ok = volatile_ok;
3754d046 1236 machine_mode mode;
a5fd5157 1237
162719b3 1238 if (expected_align < align)
1239 expected_align = align;
36d63243 1240 if (expected_size != -1)
1241 {
9db0f34d 1242 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1243 expected_size = probable_max_size;
36d63243 1244 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1245 expected_size = min_size;
1246 }
162719b3 1247
c0bfc78e 1248 /* Since this is a move insn, we don't care about volatility. */
1249 volatile_ok = 1;
1250
d5f9786f 1251 /* Try the most limited insn first, because there's no point
1252 including more than one in the machine description unless
1253 the more limited one has some advantage. */
1254
c0bfc78e 1255 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1256 mode = GET_MODE_WIDER_MODE (mode))
1257 {
6b531606 1258 enum insn_code code = direct_optab_handler (movmem_optab, mode);
c0bfc78e 1259
1260 if (code != CODE_FOR_nothing
1261 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1262 here because if SIZE is less than the mode mask, as it is
1263 returned by the macro, it will definitely be less than the
300c6cee 1264 actual mode mask. Since SIZE is within the Pmode address
1265 space, we limit MODE to Pmode. */
971ba038 1266 && ((CONST_INT_P (size)
c0bfc78e 1267 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1268 <= (GET_MODE_MASK (mode) >> 1)))
36d63243 1269 || max_size <= (GET_MODE_MASK (mode) >> 1)
300c6cee 1270 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
8786db1e 1271 {
9db0f34d 1272 struct expand_operand ops[9];
8786db1e 1273 unsigned int nops;
c0bfc78e 1274
1275 /* ??? When called via emit_block_move_for_call, it'd be
1276 nice if there were some way to inform the backend, so
1277 that it doesn't fail the expansion because it thinks
1278 emitting the libcall would be more efficient. */
32f79657 1279 nops = insn_data[(int) code].n_generator_args;
9db0f34d 1280 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
b52cb719 1281
8786db1e 1282 create_fixed_operand (&ops[0], x);
1283 create_fixed_operand (&ops[1], y);
1284 /* The check above guarantees that this size conversion is valid. */
1285 create_convert_operand_to (&ops[2], size, mode, true);
1286 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
36d63243 1287 if (nops >= 6)
8786db1e 1288 {
1289 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1290 create_integer_operand (&ops[5], expected_size);
8786db1e 1291 }
9db0f34d 1292 if (nops >= 8)
36d63243 1293 {
1294 create_integer_operand (&ops[6], min_size);
1295 /* If we cannot represent the maximal size,
1296 make the parameter NULL. */
1297 if ((HOST_WIDE_INT) max_size != -1)
1298 create_integer_operand (&ops[7], max_size);
1299 else
1300 create_fixed_operand (&ops[7], NULL);
1301 }
9db0f34d 1302 if (nops == 9)
1303 {
1304 /* If we cannot represent the maximal size,
1305 make the parameter NULL. */
1306 if ((HOST_WIDE_INT) probable_max_size != -1)
1307 create_integer_operand (&ops[8], probable_max_size);
1308 else
1309 create_fixed_operand (&ops[8], NULL);
1310 }
8786db1e 1311 if (maybe_expand_insn (code, nops, ops))
c0bfc78e 1312 {
fbc6244b 1313 volatile_ok = save_volatile_ok;
c0bfc78e 1314 return true;
10f307d9 1315 }
1316 }
c0bfc78e 1317 }
10f307d9 1318
fbc6244b 1319 volatile_ok = save_volatile_ok;
c0bfc78e 1320 return false;
1321}
6702c250 1322
f896c932 1323/* A subroutine of emit_block_move. Expand a call to memcpy.
c0bfc78e 1324 Return the return value from memcpy, 0 otherwise. */
06b8e3db 1325
ab608690 1326rtx
0b25db21 1327emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
c0bfc78e 1328{
d5f9786f 1329 rtx dst_addr, src_addr;
c2f47e15 1330 tree call_expr, fn, src_tree, dst_tree, size_tree;
3754d046 1331 machine_mode size_mode;
c0bfc78e 1332 rtx retval;
06b8e3db 1333
0a534ba7 1334 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1335 pseudos. We can then place those new pseudos into a VAR_DECL and
1336 use them later. */
d5f9786f 1337
99182918 1338 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1339 src_addr = copy_addr_to_reg (XEXP (src, 0));
c0bfc78e 1340
d5f9786f 1341 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1342 src_addr = convert_memory_address (ptr_mode, src_addr);
d5f9786f 1343
1344 dst_tree = make_tree (ptr_type_node, dst_addr);
1345 src_tree = make_tree (ptr_type_node, src_addr);
c0bfc78e 1346
f896c932 1347 size_mode = TYPE_MODE (sizetype);
d5f9786f 1348
c0bfc78e 1349 size = convert_to_mode (size_mode, size, 1);
1350 size = copy_to_mode_reg (size_mode, size);
1351
1352 /* It is incorrect to use the libcall calling conventions to call
1353 memcpy in this context. This could be a user call to memcpy and
1354 the user may wish to examine the return value from memcpy. For
1355 targets where libcalls and normal calls have different conventions
f896c932 1356 for returning pointers, we could end up generating incorrect code. */
c0bfc78e 1357
f896c932 1358 size_tree = make_tree (sizetype, size);
c0bfc78e 1359
1360 fn = emit_block_move_libcall_fn (true);
c2f47e15 1361 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
0b25db21 1362 CALL_EXPR_TAILCALL (call_expr) = tailcall;
c0bfc78e 1363
8ec3c5c2 1364 retval = expand_normal (call_expr);
c0bfc78e 1365
f896c932 1366 return retval;
c0bfc78e 1367}
f708f8fd 1368
c0bfc78e 1369/* A subroutine of emit_block_move_via_libcall. Create the tree node
ea259bbe 1370 for the function we use for block copies. */
f708f8fd 1371
c0bfc78e 1372static GTY(()) tree block_move_fn;
1373
d459e0d8 1374void
35cb5232 1375init_block_move_fn (const char *asmspec)
c0bfc78e 1376{
d459e0d8 1377 if (!block_move_fn)
c0bfc78e 1378 {
c8010b80 1379 tree args, fn, attrs, attr_args;
d459e0d8 1380
f896c932 1381 fn = get_identifier ("memcpy");
1382 args = build_function_type_list (ptr_type_node, ptr_type_node,
1383 const_ptr_type_node, sizetype,
1384 NULL_TREE);
f708f8fd 1385
e60a6f7b 1386 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
c0bfc78e 1387 DECL_EXTERNAL (fn) = 1;
1388 TREE_PUBLIC (fn) = 1;
1389 DECL_ARTIFICIAL (fn) = 1;
1390 TREE_NOTHROW (fn) = 1;
f0f2eb24 1391 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1392 DECL_VISIBILITY_SPECIFIED (fn) = 1;
8ca560c1 1393
c8010b80 1394 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1395 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1396
1397 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1398
c0bfc78e 1399 block_move_fn = fn;
10f307d9 1400 }
0dbd1c74 1401
d459e0d8 1402 if (asmspec)
b2c4af5e 1403 set_user_assembler_name (block_move_fn, asmspec);
d459e0d8 1404}
1405
1406static tree
35cb5232 1407emit_block_move_libcall_fn (int for_call)
d459e0d8 1408{
1409 static bool emitted_extern;
1410
1411 if (!block_move_fn)
1412 init_block_move_fn (NULL);
1413
c0bfc78e 1414 if (for_call && !emitted_extern)
1415 {
1416 emitted_extern = true;
b2c4af5e 1417 make_decl_rtl (block_move_fn);
c0bfc78e 1418 }
1419
d459e0d8 1420 return block_move_fn;
10f307d9 1421}
0378dbdc 1422
1423/* A subroutine of emit_block_move. Copy the data via an explicit
1424 loop. This is used only when libcalls are forbidden. */
1425/* ??? It'd be nice to copy in hunks larger than QImode. */
1426
1427static void
35cb5232 1428emit_block_move_via_loop (rtx x, rtx y, rtx size,
1429 unsigned int align ATTRIBUTE_UNUSED)
0378dbdc 1430{
1d277a67 1431 rtx_code_label *cmp_label, *top_label;
1432 rtx iter, x_addr, y_addr, tmp;
3754d046 1433 machine_mode x_addr_mode = get_address_mode (x);
1434 machine_mode y_addr_mode = get_address_mode (y);
1435 machine_mode iter_mode;
0378dbdc 1436
1437 iter_mode = GET_MODE (size);
1438 if (iter_mode == VOIDmode)
1439 iter_mode = word_mode;
1440
1441 top_label = gen_label_rtx ();
1442 cmp_label = gen_label_rtx ();
1443 iter = gen_reg_rtx (iter_mode);
1444
1445 emit_move_insn (iter, const0_rtx);
1446
1447 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1448 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1449 do_pending_stack_adjust ();
1450
0378dbdc 1451 emit_jump (cmp_label);
1452 emit_label (top_label);
1453
98155838 1454 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
59d4eb16 1455 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
98155838 1456
1457 if (x_addr_mode != y_addr_mode)
1458 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
59d4eb16 1459 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
98155838 1460
0378dbdc 1461 x = change_address (x, QImode, x_addr);
1462 y = change_address (y, QImode, y_addr);
1463
1464 emit_move_insn (x, y);
1465
1466 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1467 true, OPTAB_LIB_WIDEN);
1468 if (tmp != iter)
1469 emit_move_insn (iter, tmp);
1470
0378dbdc 1471 emit_label (cmp_label);
1472
1473 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
584abc98 1474 true, top_label, REG_BR_PROB_BASE * 90 / 100);
0378dbdc 1475}
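
/* An illustrative restatement of the loop emitted above, in C-like
   pseudocode (no additional code is generated here):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];            one QImode byte per iteration
       iter += 1;
     cmp:
       if (iter < size) goto top;    branch weighted ~90% taken  */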
10f307d9 1476\f
1477/* Copy all or part of a value X into registers starting at REGNO.
1478 The number of registers to be filled is NREGS. */
1479
1480void
3754d046 1481move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
10f307d9 1482{
c9750f6d 1483 if (nregs == 0)
1484 return;
1485
ca316360 1486 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
10f307d9 1487 x = validize_mem (force_const_mem (mode, x));
1488
1489 /* See if the machine can do this with a load multiple insn. */
bffbb74d 1490 if (targetm.have_load_multiple ())
10f307d9 1491 {
bffbb74d 1492 rtx_insn *last = get_last_insn ();
1493 rtx first = gen_rtx_REG (word_mode, regno);
1494 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1495 GEN_INT (nregs)))
d3afc10f 1496 {
1497 emit_insn (pat);
1498 return;
1499 }
1500 else
1501 delete_insns_since (last);
10f307d9 1502 }
10f307d9 1503
bffbb74d 1504 for (int i = 0; i < nregs; i++)
941522d6 1505 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
10f307d9 1506 operand_subword_force (x, i, mode));
1507}
1508
1509/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
530178a9 1510 The number of registers to be filled is NREGS. */
db7bca86 1511
10f307d9 1512void
35cb5232 1513move_block_from_reg (int regno, rtx x, int nregs)
10f307d9 1514{
cc119c14 1515 if (nregs == 0)
1516 return;
1517
10f307d9 1518 /* See if the machine can do this with a store multiple insn. */
bffbb74d 1519 if (targetm.have_store_multiple ())
10f307d9 1520 {
1d277a67 1521 rtx_insn *last = get_last_insn ();
bffbb74d 1522 rtx first = gen_rtx_REG (word_mode, regno);
1523 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1524 GEN_INT (nregs)))
d3afc10f 1525 {
1526 emit_insn (pat);
1527 return;
1528 }
1529 else
1530 delete_insns_since (last);
10f307d9 1531 }
10f307d9 1532
bffbb74d 1533 for (int i = 0; i < nregs; i++)
10f307d9 1534 {
1535 rtx tem = operand_subword (x, i, 1, BLKmode);
1536
611234b4 1537 gcc_assert (tem);
10f307d9 1538
941522d6 1539 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
10f307d9 1540 }
1541}
1542
b566e2e5 1543/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1544 ORIG, where ORIG is a non-consecutive group of registers represented by
1545 a PARALLEL. The clone is identical to the original except in that the
1546 original set of registers is replaced by a new set of pseudo registers.
1547 The new set has the same modes as the original set. */
1548
1549rtx
35cb5232 1550gen_group_rtx (rtx orig)
b566e2e5 1551{
1552 int i, length;
1553 rtx *tmps;
1554
611234b4 1555 gcc_assert (GET_CODE (orig) == PARALLEL);
b566e2e5 1556
1557 length = XVECLEN (orig, 0);
2457c754 1558 tmps = XALLOCAVEC (rtx, length);
b566e2e5 1559
1560 /* Skip a NULL entry in first slot. */
1561 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1562
1563 if (i)
1564 tmps[0] = 0;
1565
1566 for (; i < length; i++)
1567 {
3754d046 1568 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
b566e2e5 1569 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1570
1571 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1572 }
1573
1574 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1575}
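
/* For illustration (register numbers assumed): a two-word group such as

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   is cloned into the same shape, with fresh pseudos replacing hard
   registers 0 and 1 while each DImode mode and byte offset is kept.  */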
1576
e2ff5c1b 1577/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1578 except that values are placed in TMPS[i], and must later be moved
10689255 1579 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
ce739127 1580
e2ff5c1b 1581static void
1582emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
ce739127 1583{
e2ff5c1b 1584 rtx src;
6ede8018 1585 int start, i;
3754d046 1586 machine_mode m = GET_MODE (orig_src);
ce739127 1587
611234b4 1588 gcc_assert (GET_CODE (dst) == PARALLEL);
ce739127 1589
553b7a5d 1590 if (m != VOIDmode
1591 && !SCALAR_INT_MODE_P (m)
1592 && !MEM_P (orig_src)
1593 && GET_CODE (orig_src) != CONCAT)
57fb21b0 1594 {
3754d046 1595 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
57fb21b0 1596 if (imode == BLKmode)
0ab48139 1597 src = assign_stack_temp (GET_MODE (orig_src), ssize);
57fb21b0 1598 else
1599 src = gen_reg_rtx (imode);
1600 if (imode != BLKmode)
1601 src = gen_lowpart (GET_MODE (orig_src), src);
1602 emit_move_insn (src, orig_src);
1603 /* ...and back again. */
1604 if (imode != BLKmode)
1605 src = gen_lowpart (imode, src);
e2ff5c1b 1606 emit_group_load_1 (tmps, dst, src, type, ssize);
57fb21b0 1607 return;
1608 }
1609
ce739127 1610 /* Check for a NULL entry, used to indicate that the parameter goes
1611 both on the stack and in registers. */
6ede8018 1612 if (XEXP (XVECEXP (dst, 0, 0), 0))
1613 start = 0;
ce739127 1614 else
6ede8018 1615 start = 1;
1616
6ede8018 1617 /* Process the pieces. */
1618 for (i = start; i < XVECLEN (dst, 0); i++)
1619 {
3754d046 1620 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
02e7a332 1621 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1622 unsigned int bytelen = GET_MODE_SIZE (mode);
6ede8018 1623 int shift = 0;
1624
1625 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 1626 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
6ede8018 1627 {
5f4cd670 1628 /* Arrange to shift the fragment to where it belongs.
1629 extract_bit_field loads to the lsb of the reg. */
1630 if (
1631#ifdef BLOCK_REG_PADDING
1632 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1633 == (BYTES_BIG_ENDIAN ? upward : downward)
1634#else
1635 BYTES_BIG_ENDIAN
1636#endif
1637 )
1638 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
6ede8018 1639 bytelen = ssize - bytepos;
611234b4 1640 gcc_assert (bytelen > 0);
6ede8018 1641 }
1642
c037cba7 1643 /* If we won't be loading directly from memory, protect the real source
1644 from strange tricks we might play; but make sure that the source can
1645 be loaded directly into the destination. */
1646 src = orig_src;
e16ceb8e 1647 if (!MEM_P (orig_src)
c037cba7 1648 && (!CONSTANT_P (orig_src)
1649 || (GET_MODE (orig_src) != mode
1650 && GET_MODE (orig_src) != VOIDmode)))
1651 {
1652 if (GET_MODE (orig_src) == VOIDmode)
1653 src = gen_reg_rtx (mode);
1654 else
1655 src = gen_reg_rtx (GET_MODE (orig_src));
2c269e73 1656
c037cba7 1657 emit_move_insn (src, orig_src);
1658 }
1659
6ede8018 1660 /* Optimize the access just a bit. */
e16ceb8e 1661 if (MEM_P (src)
5f4cd670 1662 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1663 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
fe352cf1 1664 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
6ede8018 1665 && bytelen == GET_MODE_SIZE (mode))
1666 {
1667 tmps[i] = gen_reg_rtx (mode);
e513d163 1668 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
ce739127 1669 }
9a98257b 1670 else if (COMPLEX_MODE_P (mode)
1671 && GET_MODE (src) == mode
1672 && bytelen == GET_MODE_SIZE (mode))
1673 /* Let emit_move_complex do the bulk of the work. */
1674 tmps[i] = src;
a1000ec6 1675 else if (GET_CODE (src) == CONCAT)
1676 {
2a075f91 1677 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1678 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1679
1680 if ((bytepos == 0 && bytelen == slen0)
1681 || (bytepos != 0 && bytepos + bytelen <= slen))
4c183732 1682 {
2a075f91 1683 /* The following assumes that the concatenated objects all
1684 have the same size. In this case, a simple calculation
1685 can be used to determine the object and the bit field
1686 to be extracted. */
1687 tmps[i] = XEXP (src, bytepos / slen0);
4c183732 1688 if (! CONSTANT_P (tmps[i])
8ad4c111 1689 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
4c183732 1690 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2a075f91 1691 (bytepos % slen0) * BITS_PER_UNIT,
3f71db40 1692 1, NULL_RTX, mode, mode);
4c183732 1693 }
611234b4 1694 else
10d075b5 1695 {
611234b4 1696 rtx mem;
6ee1d299 1697
611234b4 1698 gcc_assert (!bytepos);
0ab48139 1699 mem = assign_stack_temp (GET_MODE (src), slen);
10d075b5 1700 emit_move_insn (mem, src);
6ee1d299 1701 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
3f71db40 1702 0, 1, NULL_RTX, mode, mode);
10d075b5 1703 }
a1000ec6 1704 }
c050f95a 1705 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1706 SIMD register, which is currently broken. While we get GCC
1707 to emit proper RTL for these cases, let's dump to memory. */
1708 else if (VECTOR_MODE_P (GET_MODE (dst))
8ad4c111 1709 && REG_P (src))
c050f95a 1710 {
1711 int slen = GET_MODE_SIZE (GET_MODE (src));
1712 rtx mem;
1713
0ab48139 1714 mem = assign_stack_temp (GET_MODE (src), slen);
c050f95a 1715 emit_move_insn (mem, src);
1716 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1717 }
568b64fd 1718 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1719 && XVECLEN (dst, 0) > 1)
9af5ce0c 1720 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
25b1440f 1721 else if (CONSTANT_P (src))
1722 {
1723 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1724
1725 if (len == ssize)
1726 tmps[i] = src;
1727 else
1728 {
1729 rtx first, second;
1730
e913b5cd 1731 /* TODO: const_wide_int can have sizes other than this... */
25b1440f 1732 gcc_assert (2 * len == ssize);
1733 split_double (src, &first, &second);
1734 if (i)
1735 tmps[i] = second;
1736 else
1737 tmps[i] = first;
1738 }
1739 }
1740 else if (REG_P (src) && GET_MODE (src) == mode)
73645c13 1741 tmps[i] = src;
ce739127 1742 else
325d1c45 1743 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
3f71db40 1744 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1445ea5b 1745 mode, mode);
ce739127 1746
5f4cd670 1747 if (shift)
92966f8b 1748 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
f5ff0b21 1749 shift, tmps[i], 0);
ce739127 1750 }
e2ff5c1b 1751}
1752
1753/* Emit code to move a block SRC of type TYPE to a block DST,
1754 where DST is non-consecutive registers represented by a PARALLEL.
1755 SSIZE represents the total size of block SRC in bytes, or -1
1756 if not known. */
1757
1758void
1759emit_group_load (rtx dst, rtx src, tree type, int ssize)
1760{
1761 rtx *tmps;
1762 int i;
1763
2457c754 1764 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
e2ff5c1b 1765 emit_group_load_1 (tmps, dst, src, type, ssize);
325d1c45 1766
6ede8018 1767 /* Copy the extracted pieces into the proper (probable) hard regs. */
e2ff5c1b 1768 for (i = 0; i < XVECLEN (dst, 0); i++)
1769 {
1770 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1771 if (d == NULL)
1772 continue;
1773 emit_move_insn (d, tmps[i]);
1774 }
1775}
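
/* Illustrative sketch (hypothetical operands): given a PARALLEL DST
   of the shape shown after gen_group_rtx above and a 16-byte BLKmode
   MEM SRC, the call

     emit_group_load (dst, src, type, 16);

   extracts each piece of SRC at its recorded byte offset and moves it
   into the corresponding group register.  */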
1776
1777/* Similar, but load SRC into new pseudos in a format that looks like
1778 PARALLEL. This can later be fed to emit_group_move to get things
1779 in the right place. */
1780
1781rtx
1782emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1783{
1784 rtvec vec;
1785 int i;
1786
1787 vec = rtvec_alloc (XVECLEN (parallel, 0));
1788 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1789
1790 /* Convert the vector to look just like the original PARALLEL, except
1791 with the computed values. */
1792 for (i = 0; i < XVECLEN (parallel, 0); i++)
1793 {
1794 rtx e = XVECEXP (parallel, 0, i);
1795 rtx d = XEXP (e, 0);
1796
1797 if (d)
1798 {
1799 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1800 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1801 }
1802 RTVEC_ELT (vec, i) = e;
1803 }
1804
1805 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
ce739127 1806}
1807
b566e2e5 1808/* Emit code to move a block SRC to block DST, where SRC and DST are
1809 non-consecutive groups of registers, each represented by a PARALLEL. */
1810
1811void
35cb5232 1812emit_group_move (rtx dst, rtx src)
b566e2e5 1813{
1814 int i;
1815
611234b4 1816 gcc_assert (GET_CODE (src) == PARALLEL
1817 && GET_CODE (dst) == PARALLEL
1818 && XVECLEN (src, 0) == XVECLEN (dst, 0));
b566e2e5 1819
1820 /* Skip first entry if NULL. */
1821 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1822 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1823 XEXP (XVECEXP (src, 0, i), 0));
1824}
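
/* A typical pairing, as a sketch: copy an incoming group into fresh
   pseudos and move it back later, e.g.

     rtx tmp = gen_group_rtx (orig);
     emit_group_move (tmp, orig);
     ...
     emit_group_move (orig, tmp);

   Both arguments must be PARALLELs with the same number of elements.  */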
1825
e2ff5c1b 1826/* Move a group of registers represented by a PARALLEL into pseudos. */
1827
1828rtx
1829emit_group_move_into_temps (rtx src)
1830{
1831 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1832 int i;
1833
1834 for (i = 0; i < XVECLEN (src, 0); i++)
1835 {
1836 rtx e = XVECEXP (src, 0, i);
1837 rtx d = XEXP (e, 0);
1838
1839 if (d)
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1841 RTVEC_ELT (vec, i) = e;
1842 }
1843
1844 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1845}
1846
5f4cd670 1847/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1848 where SRC is non-consecutive registers represented by a PARALLEL.
1849 SSIZE represents the total size of block ORIG_DST, or -1 if not
1850 known. */
ce739127 1851
1852void
5f4cd670 1853emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
ce739127 1854{
6ede8018 1855 rtx *tmps, dst;
73fe0e40 1856 int start, finish, i;
3754d046 1857 machine_mode m = GET_MODE (orig_dst);
ce739127 1858
611234b4 1859 gcc_assert (GET_CODE (src) == PARALLEL);
ce739127 1860
0b755acc 1861 if (!SCALAR_INT_MODE_P (m)
1862 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
57fb21b0 1863 {
3754d046 1864 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
57fb21b0 1865 if (imode == BLKmode)
0ab48139 1866 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
57fb21b0 1867 else
1868 dst = gen_reg_rtx (imode);
1869 emit_group_store (dst, src, type, ssize);
1870 if (imode != BLKmode)
1871 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1872 emit_move_insn (orig_dst, dst);
1873 return;
1874 }
1875
ce739127 1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
6ede8018 1878 if (XEXP (XVECEXP (src, 0, 0), 0))
1879 start = 0;
ce739127 1880 else
6ede8018 1881 start = 1;
73fe0e40 1882 finish = XVECLEN (src, 0);
6ede8018 1883
2457c754 1884 tmps = XALLOCAVEC (rtx, finish);
ce739127 1885
6ede8018 1886 /* Copy the (probable) hard regs into pseudos. */
73fe0e40 1887 for (i = start; i < finish; i++)
ce739127 1888 {
6ede8018 1889 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2b4bed8a 1890 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1891 {
1892 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1893 emit_move_insn (tmps[i], reg);
1894 }
1895 else
1896 tmps[i] = reg;
6ede8018 1897 }
ce739127 1898
6ede8018 1899 /* If we won't be storing directly into memory, protect the real destination
1900 from strange tricks we might play. */
1901 dst = orig_dst;
723d3639 1902 if (GET_CODE (dst) == PARALLEL)
1903 {
1904 rtx temp;
1905
1906 /* We can get a PARALLEL dst if there is a conditional expression in
1907 a return statement. In that case, the dst and src are the same,
1908 so no action is necessary. */
1909 if (rtx_equal_p (dst, src))
1910 return;
1911
1912 /* It is unclear if we can ever reach here, but we may as well handle
1913 it. Allocate a temporary, and split this into a store/load to/from
1914 the temporary. */
0ab48139 1915 temp = assign_stack_temp (GET_MODE (dst), ssize);
5f4cd670 1916 emit_group_store (temp, src, type, ssize);
1917 emit_group_load (dst, temp, type, ssize);
723d3639 1918 return;
1919 }
e16ceb8e 1920 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
6ede8018 1921 {
3754d046 1922 machine_mode outer = GET_MODE (dst);
1923 machine_mode inner;
f25b36d2 1924 HOST_WIDE_INT bytepos;
73fe0e40 1925 bool done = false;
1926 rtx temp;
1927
2b4bed8a 1928 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
73fe0e40 1929 dst = gen_reg_rtx (outer);
1930
6ede8018 1931 /* Make life a bit easier for combine. */
73fe0e40 1932 /* If the first element of the vector is the low part
1933 of the destination mode, use a paradoxical subreg to
1934 initialize the destination. */
1935 if (start < finish)
1936 {
1937 inner = GET_MODE (tmps[start]);
47e9d4ca 1938 bytepos = subreg_lowpart_offset (inner, outer);
73fe0e40 1939 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1940 {
1941 temp = simplify_gen_subreg (outer, tmps[start],
47e9d4ca 1942 inner, 0);
4bd9981b 1943 if (temp)
1944 {
1945 emit_move_insn (dst, temp);
1946 done = true;
1947 start++;
1948 }
73fe0e40 1949 }
1950 }
1951
1952 /* If the first element wasn't the low part, try the last. */
1953 if (!done
1954 && start < finish - 1)
1955 {
1956 inner = GET_MODE (tmps[finish - 1]);
47e9d4ca 1957 bytepos = subreg_lowpart_offset (inner, outer);
73fe0e40 1958 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1959 {
1960 temp = simplify_gen_subreg (outer, tmps[finish - 1],
47e9d4ca 1961 inner, 0);
4bd9981b 1962 if (temp)
1963 {
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 finish--;
1967 }
73fe0e40 1968 }
1969 }
1970
1971 /* Otherwise, simply initialize the result to zero. */
1972 if (!done)
1973 emit_move_insn (dst, CONST0_RTX (outer));
6ede8018 1974 }
6ede8018 1975
1976 /* Process the pieces. */
73fe0e40 1977 for (i = start; i < finish; i++)
6ede8018 1978 {
02e7a332 1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
3754d046 1980 machine_mode mode = GET_MODE (tmps[i]);
02e7a332 1981 unsigned int bytelen = GET_MODE_SIZE (mode);
1603adf9 1982 unsigned int adj_bytelen;
463e3bf7 1983 rtx dest = dst;
6ede8018 1984
1985 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 1986 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
dc77d5c5 1987 adj_bytelen = ssize - bytepos;
1603adf9 1988 else
1989 adj_bytelen = bytelen;
ce739127 1990
463e3bf7 1991 if (GET_CODE (dst) == CONCAT)
1992 {
dc77d5c5 1993 if (bytepos + adj_bytelen
1994 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
463e3bf7 1995 dest = XEXP (dst, 0);
1996 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1997 {
1998 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1999 dest = XEXP (dst, 1);
2000 }
611234b4 2001 else
376c21d1 2002 {
3754d046 2003 machine_mode dest_mode = GET_MODE (dest);
2004 machine_mode tmp_mode = GET_MODE (tmps[i]);
2c49840d 2005
47b0fad7 2006 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2c49840d 2007
2008 if (GET_MODE_ALIGNMENT (dest_mode)
2009 >= GET_MODE_ALIGNMENT (tmp_mode))
2010 {
47b0fad7 2011 dest = assign_stack_temp (dest_mode,
0ab48139 2012 GET_MODE_SIZE (dest_mode));
2c49840d 2013 emit_move_insn (adjust_address (dest,
2014 tmp_mode,
2015 bytepos),
2016 tmps[i]);
2017 dst = dest;
2018 }
2019 else
2020 {
47b0fad7 2021 dest = assign_stack_temp (tmp_mode,
0ab48139 2022 GET_MODE_SIZE (tmp_mode));
2c49840d 2023 emit_move_insn (dest, tmps[i]);
2024 dst = adjust_address (dest, dest_mode, bytepos);
2025 }
376c21d1 2026 break;
2027 }
463e3bf7 2028 }
2029
1603adf9 2030 /* Handle trailing fragments that run over the size of the struct. */
dc77d5c5 2031 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2032 {
2033 /* store_bit_field always takes its value from the lsb.
2034 Move the fragment to the lsb if it's not already there. */
2035 if (
2036#ifdef BLOCK_REG_PADDING
2037 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2038 == (BYTES_BIG_ENDIAN ? upward : downward)
2039#else
2040 BYTES_BIG_ENDIAN
2041#endif
2042 )
2043 {
2044 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2045 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
f5ff0b21 2046 shift, tmps[i], 0);
dc77d5c5 2047 }
1603adf9 2048
2049 /* Make sure not to write past the end of the struct. */
2050 store_bit_field (dest,
2051 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
7998fe4b 2052 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
1603adf9 2053 VOIDmode, tmps[i]);
dc77d5c5 2054 }
2055
6ede8018 2056 /* Optimize the access just a bit. */
1603adf9 2057 else if (MEM_P (dest)
2058 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2059 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2060 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2061 && bytelen == GET_MODE_SIZE (mode))
463e3bf7 2062 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1603adf9 2063
6ede8018 2064 else
463e3bf7 2065 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
4bb60ec7 2066 0, 0, mode, tmps[i]);
ce739127 2067 }
fe352cf1 2068
6ede8018 2069 /* Copy from the pseudo into the (probable) hard reg. */
376c21d1 2070 if (orig_dst != dst)
6ede8018 2071 emit_move_insn (orig_dst, dst);
ce739127 2072}
2073
933eb13a 2074/* Return a form of X that does not use a PARALLEL. TYPE is the type
2075 of the value stored in X. */
2076
2077rtx
2078maybe_emit_group_store (rtx x, tree type)
2079{
3754d046 2080 machine_mode mode = TYPE_MODE (type);
933eb13a 2081 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2082 if (GET_CODE (x) == PARALLEL)
2083 {
2084 rtx result = gen_reg_rtx (mode);
2085 emit_group_store (result, x, type, int_size_in_bytes (type));
2086 return result;
2087 }
2088 return x;
2089}
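
/* Sketch of a typical use (hypothetical variables): normalize a call
   return value RES, which may or may not be a PARALLEL, before using
   it as an ordinary rtx:

     res = maybe_emit_group_store (res, TREE_TYPE (exp));
     emit_move_insn (target, res);  */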
2090
7e91b548 2091/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
25eb0f59 2092
7e91b548 2093 This is used on targets that return BLKmode values in registers. */
25eb0f59 2094
7e91b548 2095void
2096copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
25eb0f59 2097{
325d1c45 2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2c8ff1ed 2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
3754d046 2102 machine_mode mode = GET_MODE (srcreg);
2103 machine_mode tmode = GET_MODE (target);
2104 machine_mode copy_mode;
325d1c45 2105
7e91b548 2106 /* BLKmode registers created in the back-end shouldn't have survived. */
2107 gcc_assert (mode != BLKmode);
325d1c45 2108
2c8ff1ed 2109 /* If the structure doesn't take up a whole number of words, see whether
2110 SRCREG is padded on the left or on the right. If it's on the left,
2111 set PADDING_CORRECTION to the number of bits to skip.
2112
2113 In most ABIs, the structure will be returned at the least significant
2114 end of the register, which translates to right padding on little-endian
2115 targets and left padding on big-endian targets. The opposite
2116 holds if the structure is returned at the most significant
2117 end of the register. */
2118 if (bytes % UNITS_PER_WORD != 0
2119 && (targetm.calls.return_in_msb (type)
2120 ? !BYTES_BIG_ENDIAN
2121 : BYTES_BIG_ENDIAN))
2122 padding_correction
325d1c45 2123 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2124
7e91b548 2125 /* We can use a single move if we have an exact mode for the size. */
2126 else if (MEM_P (target)
2127 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2128 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2129 && bytes == GET_MODE_SIZE (mode))
2130 {
2131 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2132 return;
2133 }
2134
2135 /* And if we additionally have the same mode for a register. */
2136 else if (REG_P (target)
2137 && GET_MODE (target) == mode
2138 && bytes == GET_MODE_SIZE (mode))
2139 {
2140 emit_move_insn (target, srcreg);
2141 return;
2142 }
2143
2144 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2145 into a new pseudo which is a full word. */
2146 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2147 {
2148 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2149 mode = word_mode;
2150 }
2151
13a68d5b 2152 /* Copy the structure BITSIZE bits at a time. If the target lives in
2153 memory, take care of not reading/writing past its end by selecting
2154 a copy mode suited to BITSIZE. This should always be possible given
2155 how it is computed.
fa56dc1d 2156
7e91b548 2157 If the target lives in a register, make sure not to select a copy mode
2158 larger than the mode of the register.
2159
325d1c45 2160 We could probably emit more efficient code for machines which do not use
2161 strict alignment, but it doesn't seem worth the effort at the current
2162 time. */
13a68d5b 2163
2164 copy_mode = word_mode;
7e91b548 2165 if (MEM_P (target))
13a68d5b 2166 {
3754d046 2167 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
13a68d5b 2168 if (mem_mode != BLKmode)
2169 copy_mode = mem_mode;
2170 }
7e91b548 2171 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2172 copy_mode = tmode;
13a68d5b 2173
2c8ff1ed 2174 for (bitpos = 0, xbitpos = padding_correction;
325d1c45 2175 bitpos < bytes * BITS_PER_UNIT;
2176 bitpos += bitsize, xbitpos += bitsize)
2177 {
fa56dc1d 2178 /* We need a new source operand each time xbitpos is on a
2c8ff1ed 2179 word boundary and when xbitpos == padding_correction
325d1c45 2180 (the first time through). */
7e91b548 2181 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2182 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
325d1c45 2183
2184 /* We need a new destination operand each time bitpos is on
2185 a word boundary. */
7e91b548 2186 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2187 dst = target;
2188 else if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
fa56dc1d 2190
325d1c45 2191 /* Use xbitpos for the source extraction (right justified) and
13a68d5b 2192 bitpos for the destination store (left justified). */
4bb60ec7 2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
325d1c45 2194 extract_bit_field (src, bitsize,
3f71db40 2195 xbitpos % BITS_PER_WORD, 1,
13a68d5b 2196 NULL_RTX, copy_mode, copy_mode));
325d1c45 2197 }
25eb0f59 2198}
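
/* Usage sketch (hypothetical variables): after expanding a call whose
   BLKmode result arrives in the return register, copy it into a stack
   slot:

     rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));
     copy_blkmode_from_reg (slot, hard_ret_reg, type);

   HARD_RET_REG stands for whatever return-value register the target's
   ABI designates.  */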
2199
ee5ab2d1 2200/* Copy BLKmode value SRC into a register of mode MODE. Return the
2201 register if it contains any data, otherwise return null.
2202
2203 This is used on targets that return BLKmode values in registers. */
2204
2205rtx
3754d046 2206copy_blkmode_to_reg (machine_mode mode, tree src)
ee5ab2d1 2207{
2208 int i, n_regs;
2209 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2210 unsigned int bitsize;
2211 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
3754d046 2212 machine_mode dst_mode;
ee5ab2d1 2213
2214 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2215
2216 x = expand_normal (src);
2217
2218 bytes = int_size_in_bytes (TREE_TYPE (src));
2219 if (bytes == 0)
2220 return NULL_RTX;
2221
2222 /* If the structure doesn't take up a whole number of words, see
2223 whether the register value should be padded on the left or on
2224 the right. Set PADDING_CORRECTION to the number of padding
2225 bits needed on the left side.
2226
2227 In most ABIs, the structure will be returned at the least significant
2228 end of the register, which translates to right padding on little-endian
2229 targets and left padding on big-endian targets. The opposite
2230 holds if the structure is returned at the most significant
2231 end of the register. */
2232 if (bytes % UNITS_PER_WORD != 0
2233 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2234 ? !BYTES_BIG_ENDIAN
2235 : BYTES_BIG_ENDIAN))
2236 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2237 * BITS_PER_UNIT));
2238
2239 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2240 dst_words = XALLOCAVEC (rtx, n_regs);
2241 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2242
2243 /* Copy the structure BITSIZE bits at a time. */
2244 for (bitpos = 0, xbitpos = padding_correction;
2245 bitpos < bytes * BITS_PER_UNIT;
2246 bitpos += bitsize, xbitpos += bitsize)
2247 {
2248 /* We need a new destination pseudo each time xbitpos is
2249 on a word boundary and when xbitpos == padding_correction
2250 (the first time through). */
2251 if (xbitpos % BITS_PER_WORD == 0
2252 || xbitpos == padding_correction)
2253 {
2254 /* Generate an appropriate register. */
2255 dst_word = gen_reg_rtx (word_mode);
2256 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2257
2258 /* Clear the destination before we move anything into it. */
2259 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2260 }
2261
2262 /* We need a new source operand each time bitpos is on a word
2263 boundary. */
2264 if (bitpos % BITS_PER_WORD == 0)
2265 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2266
2267 /* Use bitpos for the source extraction (left justified) and
2268 xbitpos for the destination store (right justified). */
2269 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2270 0, 0, word_mode,
2271 extract_bit_field (src_word, bitsize,
3f71db40 2272 bitpos % BITS_PER_WORD, 1,
ee5ab2d1 2273 NULL_RTX, word_mode, word_mode));
2274 }
2275
2276 if (mode == BLKmode)
2277 {
2278 /* Find the smallest integer mode large enough to hold the
2279 entire structure. */
2280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2281 mode != VOIDmode;
2282 mode = GET_MODE_WIDER_MODE (mode))
2283 /* Have we found a large enough mode? */
2284 if (GET_MODE_SIZE (mode) >= bytes)
2285 break;
2286
2287 /* A suitable mode should have been found. */
2288 gcc_assert (mode != VOIDmode);
2289 }
2290
2291 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2292 dst_mode = word_mode;
2293 else
2294 dst_mode = mode;
2295 dst = gen_reg_rtx (dst_mode);
2296
2297 for (i = 0; i < n_regs; i++)
2298 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2299
2300 if (mode != dst_mode)
2301 dst = gen_lowpart (mode, dst);
2302
2303 return dst;
2304}
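
/* Sketch (hypothetical variables): when expanding "return aggr;" for
   a small BLKmode value that the ABI returns in registers, one might
   write

     rtx val = copy_blkmode_to_reg (mode_of_return_reg, retexpr);
     if (val)
       emit_move_insn (return_reg, val);

   RETEXPR, MODE_OF_RETURN_REG and RETURN_REG are placeholders for the
   actual tree and hard register involved.  */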
2305
07409b3a 2306/* Add a USE expression for REG to the (possibly empty) list pointed
2307 to by CALL_FUSAGE. REG must denote a hard register. */
10f307d9 2308
2309void
3754d046 2310use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
7e2ca70b 2311{
b69de85a 2312 gcc_assert (REG_P (reg));
2313
2314 if (!HARD_REGISTER_P (reg))
2315 return;
1f8b6002 2316
7e2ca70b 2317 *call_fusage
b4eeceb9 2318 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
7e2ca70b 2319}
2320
e67cfba4 2321/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2322 to by CALL_FUSAGE. REG must denote a hard register. */
2323
2324void
3754d046 2325clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
e67cfba4 2326{
2327 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2328
2329 *call_fusage
2330 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2331}
2332
07409b3a 2333/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2334 starting at REGNO. All of these registers must be hard registers. */
7e2ca70b 2335
2336void
35cb5232 2337use_regs (rtx *call_fusage, int regno, int nregs)
10f307d9 2338{
f2799de7 2339 int i;
10f307d9 2340
611234b4 2341 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
f2799de7 2342
2343 for (i = 0; i < nregs; i++)
936082bb 2344 use_reg (call_fusage, regno_reg_rtx[regno + i]);
10f307d9 2345}
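
/* Illustrative sketch: record that a call reads two argument
   registers starting at hard register 2 (a hypothetical number):

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 2, 2);

   CALL_FUSAGE is later attached to the CALL_INSN as its usage list.  */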
ce739127 2346
2347/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2348 PARALLEL REGS. This is for calls that pass values in multiple
2349 non-contiguous locations. The Irix 6 ABI has examples of this. */
2350
2351void
35cb5232 2352use_group_regs (rtx *call_fusage, rtx regs)
ce739127 2353{
2354 int i;
2355
2f373e5d 2356 for (i = 0; i < XVECLEN (regs, 0); i++)
2357 {
2358 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
ce739127 2359
2f373e5d 2360 /* A NULL entry means the parameter goes both on the stack and in
2361 registers. This can also be a MEM for targets that pass values
2362 partially on the stack and partially in registers. */
8ad4c111 2363 if (reg != 0 && REG_P (reg))
2f373e5d 2364 use_reg (call_fusage, reg);
2365 }
ce739127 2366}
c1a83279 2367
2368/* Return the defining gimple statement for SSA_NAME NAME if it is an
2369 assignment and the code of the expression on the RHS is CODE. Return
2370 NULL otherwise. */
2371
2372static gimple
2373get_def_for_expr (tree name, enum tree_code code)
2374{
2375 gimple def_stmt;
2376
2377 if (TREE_CODE (name) != SSA_NAME)
2378 return NULL;
2379
2380 def_stmt = get_gimple_for_ssa_name (name);
2381 if (!def_stmt
2382 || gimple_assign_rhs_code (def_stmt) != code)
2383 return NULL;
2384
2385 return def_stmt;
2386}
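
/* Sketch of the typical pattern (hypothetical SSA name): look through
   NAME for a defining multiplication feeding the current expression:

     gimple def = get_def_for_expr (name, MULT_EXPR);
     if (def)
       {
         tree op0 = gimple_assign_rhs1 (def);
         tree op1 = gimple_assign_rhs2 (def);
         ...
       }  */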
c909ed33 2387
2388/* Return the defining gimple statement for SSA_NAME NAME if it is an
2389 assignment and the class of the expression on the RHS is CLASS. Return
2390 NULL otherwise. */
2391
2392static gimple
2393get_def_for_expr_class (tree name, enum tree_code_class tclass)
2394{
2395 gimple def_stmt;
2396
2397 if (TREE_CODE (name) != SSA_NAME)
2398 return NULL;
2399
2400 def_stmt = get_gimple_for_ssa_name (name);
2401 if (!def_stmt
2402 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2403 return NULL;
2404
2405 return def_stmt;
2406}
10f307d9 2407\f
6840589f 2408
d1f6ae0c 2409/* Determine whether the LEN bytes generated by CONSTFUN can be
2410 stored to memory using several move instructions. CONSTFUNDATA is
2411 a pointer which will be passed as argument in every CONSTFUN call.
4b297e2e 2412 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2413 a memset operation and false if it's a copy of a constant string.
2414 Return nonzero if a call to store_by_pieces should succeed. */
d1f6ae0c 2415
6840589f 2416int
35cb5232 2417can_store_by_pieces (unsigned HOST_WIDE_INT len,
3754d046 2418 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
4b297e2e 2419 void *constfundata, unsigned int align, bool memsetp)
6840589f 2420{
025d4f81 2421 unsigned HOST_WIDE_INT l;
2422 unsigned int max_size;
6840589f 2423 HOST_WIDE_INT offset = 0;
3754d046 2424 machine_mode mode;
6840589f 2425 enum insn_code icode;
2426 int reverse;
d92517d3 2427 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2428 rtx cst ATTRIBUTE_UNUSED;
6840589f 2429
1d881c02 2430 if (len == 0)
2431 return 1;
2432
d4bd0e64 2433 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2434 memsetp
2435 ? SET_BY_PIECES
2436 : STORE_BY_PIECES,
2437 optimize_insn_for_speed_p ()))
6840589f 2438 return 0;
2439
c7e41aee 2440 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
6840589f 2441
2442 /* We would first store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2444
2445 for (reverse = 0;
2446 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2447 reverse++)
2448 {
2449 l = len;
d1f6ae0c 2450 max_size = STORE_MAX_PIECES + 1;
01dd0067 2451 while (max_size > 1 && l > 0)
6840589f 2452 {
c7e41aee 2453 mode = widest_int_mode_for_size (max_size);
6840589f 2454
2455 if (mode == VOIDmode)
2456 break;
2457
d6bf3b14 2458 icode = optab_handler (mov_optab, mode);
6840589f 2459 if (icode != CODE_FOR_nothing
2460 && align >= GET_MODE_ALIGNMENT (mode))
2461 {
2462 unsigned int size = GET_MODE_SIZE (mode);
2463
2464 while (l >= size)
2465 {
2466 if (reverse)
2467 offset -= size;
2468
2469 cst = (*constfun) (constfundata, offset, mode);
ca316360 2470 if (!targetm.legitimate_constant_p (mode, cst))
6840589f 2471 return 0;
2472
2473 if (!reverse)
2474 offset += size;
2475
2476 l -= size;
2477 }
2478 }
2479
2480 max_size = GET_MODE_SIZE (mode);
2481 }
2482
2483 /* The code above should have handled everything. */
611234b4 2484 gcc_assert (!l);
6840589f 2485 }
2486
2487 return 1;
2488}
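
/* Sketch of a CONSTFUN callback and its use (hypothetical helper; the
   real callbacks live in builtins.c, e.g. builtin_memcpy_read_str):

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset, machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

     ...
     if (can_store_by_pieces (len, example_read_str, (void *) str,
                              align, false))
       store_by_pieces (dest_mem, len, example_read_str, (void *) str,
                        align, false, 0);  */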
2489
2490/* Generate several move instructions to store LEN bytes generated by
2491 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2492 pointer which will be passed as argument in every CONSTFUN call.
4b297e2e 2493 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2494 a memset operation and false if it's a copy of a constant string.
9fe0e1b8 2495 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
2496 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
2497 stpcpy. */
6840589f 2498
9fe0e1b8 2499rtx
35cb5232 2500store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
3754d046 2501 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
4b297e2e 2502 void *constfundata, unsigned int align, bool memsetp, int endp)
6840589f 2503{
3754d046 2504 machine_mode to_addr_mode = get_address_mode (to);
584511c1 2505 struct store_by_pieces_d data;
6840589f 2506
1d881c02 2507 if (len == 0)
2508 {
611234b4 2509 gcc_assert (endp != 2);
1d881c02 2510 return to;
2511 }
2512
d4bd0e64 2513 gcc_assert (targetm.use_by_pieces_infrastructure_p
2514 (len, align,
2515 memsetp
2516 ? SET_BY_PIECES
2517 : STORE_BY_PIECES,
2518 optimize_insn_for_speed_p ()));
2519
6840589f 2520 data.constfun = constfun;
2521 data.constfundata = constfundata;
2522 data.len = len;
2523 data.to = to;
2524 store_by_pieces_1 (&data, align);
9fe0e1b8 2525 if (endp)
2526 {
2527 rtx to1;
2528
611234b4 2529 gcc_assert (!data.reverse);
9fe0e1b8 2530 if (data.autinc_to)
2531 {
2532 if (endp == 2)
2533 {
2534 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2536 else
98155838 2537 data.to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 2538 plus_constant (to_addr_mode,
2539 data.to_addr,
9fe0e1b8 2540 -1));
2541 }
2542 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2543 data.offset);
2544 }
2545 else
2546 {
2547 if (endp == 2)
2548 --data.offset;
2549 to1 = adjust_address (data.to, QImode, data.offset);
2550 }
2551 return to1;
2552 }
2553 else
2554 return data.to;
6840589f 2555}
2556
325d1c45 2557/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
0a534ba7 2558 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
dbd14dc5 2559
2560static void
f1667d92 2561clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
dbd14dc5 2562{
584511c1 2563 struct store_by_pieces_d data;
6840589f 2564
1d881c02 2565 if (len == 0)
2566 return;
2567
6840589f 2568 data.constfun = clear_by_pieces_1;
2571646d 2569 data.constfundata = NULL;
6840589f 2570 data.len = len;
2571 data.to = to;
2572 store_by_pieces_1 (&data, align);
2573}
2574
2575/* Callback routine for clear_by_pieces.
2576 Return const0_rtx unconditionally. */
2577
2578static rtx
35cb5232 2579clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2580 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 2581 machine_mode mode ATTRIBUTE_UNUSED)
6840589f 2582{
2583 return const0_rtx;
2584}
2585
2586/* Subroutine of clear_by_pieces and store_by_pieces.
2587 Generate several move instructions to store LEN bytes of block TO. (A MEM
0a534ba7 2588 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
6840589f 2589
2590static void
584511c1 2591store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
35cb5232 2592 unsigned int align ATTRIBUTE_UNUSED)
6840589f 2593{
3754d046 2594 machine_mode to_addr_mode = get_address_mode (data->to);
6840589f 2595 rtx to_addr = XEXP (data->to, 0);
025d4f81 2596 unsigned int max_size = STORE_MAX_PIECES + 1;
53bd09ab 2597 enum insn_code icode;
dbd14dc5 2598
6840589f 2599 data->offset = 0;
2600 data->to_addr = to_addr;
2601 data->autinc_to
dbd14dc5 2602 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2603 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2604
6840589f 2605 data->explicit_inc_to = 0;
2606 data->reverse
dbd14dc5 2607 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
6840589f 2608 if (data->reverse)
2609 data->offset = data->len;
dbd14dc5 2610
6840589f 2611 /* If storing requires more than two move insns,
dbd14dc5 2612 copy addresses to registers (to make displacements shorter)
2613 and use post-increment if available. */
6840589f 2614 if (!data->autinc_to
025d4f81 2615 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
dbd14dc5 2616 {
c7e41aee 2617 /* Determine the main mode we'll be using.
2618 MODE might not be used depending on the definitions of the
2619 USE_* macros below. */
3754d046 2620 machine_mode mode ATTRIBUTE_UNUSED
c7e41aee 2621 = widest_int_mode_for_size (max_size);
53bd09ab 2622
6840589f 2623 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
dbd14dc5 2624 {
98155838 2625 data->to_addr = copy_to_mode_reg (to_addr_mode,
29c05e22 2626 plus_constant (to_addr_mode,
2627 to_addr,
2628 data->len));
6840589f 2629 data->autinc_to = 1;
2630 data->explicit_inc_to = -1;
dbd14dc5 2631 }
f7c44134 2632
6840589f 2633 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2634 && ! data->autinc_to)
dbd14dc5 2635 {
98155838 2636 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
6840589f 2637 data->autinc_to = 1;
2638 data->explicit_inc_to = 1;
dbd14dc5 2639 }
f7c44134 2640
6840589f 2641 if ( !data->autinc_to && CONSTANT_P (to_addr))
98155838 2642 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
dbd14dc5 2643 }
2644
c7e41aee 2645 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
dbd14dc5 2646
6840589f 2647 /* First store what we can in the largest integer mode, then go to
dbd14dc5 2648 successively smaller modes. */
2649
01dd0067 2650 while (max_size > 1 && data->len > 0)
dbd14dc5 2651 {
3754d046 2652 machine_mode mode = widest_int_mode_for_size (max_size);
dbd14dc5 2653
2654 if (mode == VOIDmode)
2655 break;
2656
d6bf3b14 2657 icode = optab_handler (mov_optab, mode);
325d1c45 2658 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
6840589f 2659 store_by_pieces_2 (GEN_FCN (icode), mode, data);
dbd14dc5 2660
2661 max_size = GET_MODE_SIZE (mode);
2662 }
2663
2664 /* The code above should have handled everything. */
611234b4 2665 gcc_assert (!data->len);
dbd14dc5 2666}
2667
6840589f 2668/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
dbd14dc5 2669 with move instructions for mode MODE. GENFUN is the gen_... function
2670 to make a move insn for that mode. DATA has all the other info. */
2671
2672static void
3d953cb1 2673store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
584511c1 2674 struct store_by_pieces_d *data)
dbd14dc5 2675{
f7c44134 2676 unsigned int size = GET_MODE_SIZE (mode);
6840589f 2677 rtx to1, cst;
dbd14dc5 2678
2679 while (data->len >= size)
2680 {
f7c44134 2681 if (data->reverse)
2682 data->offset -= size;
dbd14dc5 2683
f7c44134 2684 if (data->autinc_to)
bf42c62d 2685 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2686 data->offset);
fa56dc1d 2687 else
e513d163 2688 to1 = adjust_address (data->to, mode, data->offset);
dbd14dc5 2689
e4e498cf 2690 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
6840589f 2691 emit_insn (gen_add2_insn (data->to_addr,
d11aedc7 2692 gen_int_mode (-(HOST_WIDE_INT) size,
2693 GET_MODE (data->to_addr))));
dbd14dc5 2694
6840589f 2695 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2696 emit_insn ((*genfun) (to1, cst));
f7c44134 2697
e4e498cf 2698 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
d11aedc7 2699 emit_insn (gen_add2_insn (data->to_addr,
2700 gen_int_mode (size,
2701 GET_MODE (data->to_addr))));
dbd14dc5 2702
f7c44134 2703 if (! data->reverse)
2704 data->offset += size;
dbd14dc5 2705
2706 data->len -= size;
2707 }
2708}
2709\f
325d1c45 2710/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2a631e19 2711 its length in bytes. */
0dbd1c74 2712
2713rtx
162719b3 2714clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
36d63243 2715 unsigned int expected_align, HOST_WIDE_INT expected_size,
2716 unsigned HOST_WIDE_INT min_size,
9db0f34d 2717 unsigned HOST_WIDE_INT max_size,
2718 unsigned HOST_WIDE_INT probable_max_size)
10f307d9 2719{
3754d046 2720 machine_mode mode = GET_MODE (object);
83016f38 2721 unsigned int align;
0dbd1c74 2722
0b25db21 2723 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2724
20c377c2 2725 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2726 just move a zero. Otherwise, do this a piece at a time. */
83016f38 2727 if (mode != BLKmode
971ba038 2728 && CONST_INT_P (size)
83016f38 2729 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
10f307d9 2730 {
83016f38 2731 rtx zero = CONST0_RTX (mode);
2732 if (zero != NULL)
2733 {
2734 emit_move_insn (object, zero);
2735 return NULL;
2736 }
2737
2738 if (COMPLEX_MODE_P (mode))
2739 {
2740 zero = CONST0_RTX (GET_MODE_INNER (mode));
2741 if (zero != NULL)
2742 {
2743 write_complex_part (object, zero, 0);
2744 write_complex_part (object, zero, 1);
2745 return NULL;
2746 }
2747 }
c0bfc78e 2748 }
2749
83016f38 2750 if (size == const0_rtx)
2751 return NULL;
2752
2753 align = MEM_ALIGN (object);
2754
971ba038 2755 if (CONST_INT_P (size)
d4bd0e64 2756 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2757 CLEAR_BY_PIECES,
2758 optimize_insn_for_speed_p ()))
83016f38 2759 clear_by_pieces (object, INTVAL (size), align);
162719b3 2760 else if (set_storage_via_setmem (object, size, const0_rtx, align,
36d63243 2761 expected_align, expected_size,
9db0f34d 2762 min_size, max_size, probable_max_size))
83016f38 2763 ;
bd1a81f7 2764 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
ab608690 2765 return set_storage_via_libcall (object, size, const0_rtx,
2766 method == BLOCK_OP_TAILCALL);
bd1a81f7 2767 else
2768 gcc_unreachable ();
83016f38 2769
2770 return NULL;
c0bfc78e 2771}
2772
162719b3 2773rtx
2774clear_storage (rtx object, rtx size, enum block_op_methods method)
2775{
36d63243 2776 unsigned HOST_WIDE_INT max, min = 0;
2777 if (GET_CODE (size) == CONST_INT)
2778 min = max = UINTVAL (size);
2779 else
2780 max = GET_MODE_MASK (GET_MODE (size));
9db0f34d 2781 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
162719b3 2782}
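
/* Usage sketch (hypothetical MEM): zero a 32-byte BLKmode object:

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   This resolves to a piecewise clear, a setmem pattern, or a libcall
   to memset, as decided by clear_storage_hints above.  */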
2783
2784
f896c932 2785/* A subroutine of clear_storage. Expand a call to memset.
c0bfc78e 2786 Return the return value of memset, 0 otherwise. */
dbd14dc5 2787
ab608690 2788rtx
2789set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
c0bfc78e 2790{
c2f47e15 2791 tree call_expr, fn, object_tree, size_tree, val_tree;
3754d046 2792 machine_mode size_mode;
c0bfc78e 2793 rtx retval;
dbd14dc5 2794
0a534ba7 2795 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2796 wrap those pseudos in tree nodes and use them in the call below. */
f708f8fd 2797
99182918 2798 object = copy_addr_to_reg (XEXP (object, 0));
f708f8fd 2799
f896c932 2800 size_mode = TYPE_MODE (sizetype);
c0bfc78e 2801 size = convert_to_mode (size_mode, size, 1);
2802 size = copy_to_mode_reg (size_mode, size);
f708f8fd 2803
c0bfc78e 2804 /* It is incorrect to use the libcall calling conventions to call
2805 memset in this context. This could be a user call to memset and
2806 the user may wish to examine the return value from memset. For
2807 targets where libcalls and normal calls have different conventions
f896c932 2808 for returning pointers, we could end up generating incorrect code. */
06b8e3db 2809
c0bfc78e 2810 object_tree = make_tree (ptr_type_node, object);
971ba038 2811 if (!CONST_INT_P (val))
ab608690 2812 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
f896c932 2813 size_tree = make_tree (sizetype, size);
ab608690 2814 val_tree = make_tree (integer_type_node, val);
c0bfc78e 2815
2816 fn = clear_storage_libcall_fn (true);
d52d7a3a 2817 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
0b25db21 2818 CALL_EXPR_TAILCALL (call_expr) = tailcall;
c0bfc78e 2819
8ec3c5c2 2820 retval = expand_normal (call_expr);
c0bfc78e 2821
f896c932 2822 return retval;
c0bfc78e 2823}
2824
ab608690 2825/* A subroutine of set_storage_via_libcall. Create the tree node
ea259bbe 2826 for the function we use for block clears. */
c0bfc78e 2827
aa140b76 2828tree block_clear_fn;
8ca560c1 2829
d459e0d8 2830void
35cb5232 2831init_block_clear_fn (const char *asmspec)
c0bfc78e 2832{
d459e0d8 2833 if (!block_clear_fn)
c0bfc78e 2834 {
d459e0d8 2835 tree fn, args;
2836
f896c932 2837 fn = get_identifier ("memset");
2838 args = build_function_type_list (ptr_type_node, ptr_type_node,
2839 integer_type_node, sizetype,
2840 NULL_TREE);
c0bfc78e 2841
e60a6f7b 2842 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
c0bfc78e 2843 DECL_EXTERNAL (fn) = 1;
2844 TREE_PUBLIC (fn) = 1;
2845 DECL_ARTIFICIAL (fn) = 1;
2846 TREE_NOTHROW (fn) = 1;
f0f2eb24 2847 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2848 DECL_VISIBILITY_SPECIFIED (fn) = 1;
c0bfc78e 2849
2850 block_clear_fn = fn;
10f307d9 2851 }
0dbd1c74 2852
d459e0d8 2853 if (asmspec)
b2c4af5e 2854 set_user_assembler_name (block_clear_fn, asmspec);
d459e0d8 2855}
2856
2857static tree
35cb5232 2858clear_storage_libcall_fn (int for_call)
d459e0d8 2859{
2860 static bool emitted_extern;
2861
2862 if (!block_clear_fn)
2863 init_block_clear_fn (NULL);
2864
c0bfc78e 2865 if (for_call && !emitted_extern)
2866 {
2867 emitted_extern = true;
b2c4af5e 2868 make_decl_rtl (block_clear_fn);
c0bfc78e 2869 }
10f307d9 2870
d459e0d8 2871 return block_clear_fn;
c0bfc78e 2872}
7a3e5564 2873\f
2874/* Expand a setmem pattern; return true if successful. */
2875
2876bool
162719b3 2877set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
36d63243 2878 unsigned int expected_align, HOST_WIDE_INT expected_size,
2879 unsigned HOST_WIDE_INT min_size,
9db0f34d 2880 unsigned HOST_WIDE_INT max_size,
2881 unsigned HOST_WIDE_INT probable_max_size)
7a3e5564 2882{
2883 /* Try the most limited insn first, because there's no point
2884 including more than one in the machine description unless
2885 the more limited one has some advantage. */
2886
3754d046 2887 machine_mode mode;
7a3e5564 2888
162719b3 2889 if (expected_align < align)
2890 expected_align = align;
36d63243 2891 if (expected_size != -1)
2892 {
2893 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2894 expected_size = max_size;
2895 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2896 expected_size = min_size;
2897 }
162719b3 2898
7a3e5564 2899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2900 mode = GET_MODE_WIDER_MODE (mode))
2901 {
6b531606 2902 enum insn_code code = direct_optab_handler (setmem_optab, mode);
7a3e5564 2903
2904 if (code != CODE_FOR_nothing
300c6cee 2905 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2906 here because if SIZE is less than the mode mask, as it is
2907 returned by the macro, it will definitely be less than the
2908 actual mode mask. Since SIZE is within the Pmode address
2909 space, we limit MODE to Pmode. */
971ba038 2910 && ((CONST_INT_P (size)
7a3e5564 2911 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2912 <= (GET_MODE_MASK (mode) >> 1)))
36d63243 2913 || max_size <= (GET_MODE_MASK (mode) >> 1)
300c6cee 2914 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
8786db1e 2915 {
9db0f34d 2916 struct expand_operand ops[9];
8786db1e 2917 unsigned int nops;
2918
32f79657 2919 nops = insn_data[(int) code].n_generator_args;
9db0f34d 2920 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
b52cb719 2921
8786db1e 2922 create_fixed_operand (&ops[0], object);
2923 /* The check above guarantees that this size conversion is valid. */
2924 create_convert_operand_to (&ops[1], size, mode, true);
2925 create_convert_operand_from (&ops[2], val, byte_mode, true);
2926 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
36d63243 2927 if (nops >= 6)
cc0dc0e0 2928 {
8786db1e 2929 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2930 create_integer_operand (&ops[5], expected_size);
cc0dc0e0 2931 }
9db0f34d 2932 if (nops >= 8)
36d63243 2933 {
2934 create_integer_operand (&ops[6], min_size);
2935 /* If we cannot represent the maximal size,
2936 pass NULL for the parameter. */
2937 if ((HOST_WIDE_INT) max_size != -1)
2938 create_integer_operand (&ops[7], max_size);
2939 else
2940 create_fixed_operand (&ops[7], NULL);
2941 }
9db0f34d 2942 if (nops == 9)
2943 {
2944 /* If we cannot represent the maximal size,
2945 pass NULL for the parameter. */
2946 if ((HOST_WIDE_INT) probable_max_size != -1)
2947 create_integer_operand (&ops[8], probable_max_size);
2948 else
2949 create_fixed_operand (&ops[8], NULL);
2950 }
8786db1e 2951 if (maybe_expand_insn (code, nops, ops))
2952 return true;
7a3e5564 2953 }
2954 }
2955
2956 return false;
2957}
2958
c0bfc78e 2959\f
de17a47b 2960/* Write to one of the components of the complex value CPLX. Write VAL to
2961 the real part if IMAG_P is false, and the imaginary part if its true. */
10f307d9 2962
0c93c8a9 2963void
de17a47b 2964write_complex_part (rtx cplx, rtx val, bool imag_p)
2965{
3754d046 2966 machine_mode cmode;
2967 machine_mode imode;
a3b104d2 2968 unsigned ibitsize;
2969
de17a47b 2970 if (GET_CODE (cplx) == CONCAT)
de17a47b 2971 {
a3b104d2 2972 emit_move_insn (XEXP (cplx, imag_p), val);
2973 return;
2974 }
2975
2976 cmode = GET_MODE (cplx);
2977 imode = GET_MODE_INNER (cmode);
2978 ibitsize = GET_MODE_BITSIZE (imode);
10f307d9 2979
ba881251 2980 /* For MEMs simplify_gen_subreg may generate an invalid new address
2981 because, e.g., the original address is considered mode-dependent
2982 by the target, which restricts simplify_subreg from invoking
2983 adjust_address_nv. Instead of preparing fallback support for an
2984 invalid address, we call adjust_address_nv directly. */
2985 if (MEM_P (cplx))
69edf651 2986 {
2987 emit_move_insn (adjust_address_nv (cplx, imode,
2988 imag_p ? GET_MODE_SIZE (imode) : 0),
2989 val);
2990 return;
2991 }
ba881251 2992
a3b104d2 2993 /* If the sub-object is at least word sized, then we know that subregging
2994 will work. This special case is important, since store_bit_field
2995 wants to operate on integer modes, and there's rarely an OImode to
2996 correspond to TCmode. */
ccd5a3ef 2997 if (ibitsize >= BITS_PER_WORD
2998 /* For hard regs we have exact predicates. Assume we can split
2999 the original object if it spans an even number of hard regs.
3000 This special case is important for SCmode on 64-bit platforms
3001 where the natural size of floating-point regs is 32-bit. */
1c14a50e 3002 || (REG_P (cplx)
ccd5a3ef 3003 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
0933f1d9 3004 && REG_NREGS (cplx) % 2 == 0))
a3b104d2 3005 {
3006 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3007 imag_p ? GET_MODE_SIZE (imode) : 0);
ccd5a3ef 3008 if (part)
3009 {
3010 emit_move_insn (part, val);
3011 return;
3012 }
3013 else
3014 /* simplify_gen_subreg may fail for sub-word MEMs. */
3015 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
de17a47b 3016 }
ccd5a3ef 3017
4bb60ec7 3018 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
de17a47b 3019}
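
/* Sketch: a complete complex value is stored by parts, exactly as
   emit_move_complex_parts does further below:

     write_complex_part (target, re, false);
     write_complex_part (target, im, true);

   RE and IM are placeholders for rtxes of the component mode.  */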
3020
3021/* Extract one of the components of the complex value CPLX. Extract the
3022 real part if IMAG_P is false, and the imaginary part if it's true. */
3023
3024static rtx
3025read_complex_part (rtx cplx, bool imag_p)
10f307d9 3026{
3754d046 3027 machine_mode cmode, imode;
de17a47b 3028 unsigned ibitsize;
10f307d9 3029
de17a47b 3030 if (GET_CODE (cplx) == CONCAT)
3031 return XEXP (cplx, imag_p);
10f307d9 3032
de17a47b 3033 cmode = GET_MODE (cplx);
3034 imode = GET_MODE_INNER (cmode);
3035 ibitsize = GET_MODE_BITSIZE (imode);
3036
3037 /* Special case reads from complex constants that got spilled to memory. */
3038 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
94580317 3039 {
de17a47b 3040 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3041 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3042 {
3043 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3044 if (CONSTANT_CLASS_P (part))
3045 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3046 }
3047 }
c0c4a46d 3048
ba881251 3049 /* For MEMs simplify_gen_subreg may generate an invalid new address
3050 because, e.g., the original address is considered mode-dependent
3051 by the target, which restricts simplify_subreg from invoking
3052 adjust_address_nv. Instead of preparing fallback support for an
3053 invalid address, we call adjust_address_nv directly. */
3054 if (MEM_P (cplx))
3055 return adjust_address_nv (cplx, imode,
3056 imag_p ? GET_MODE_SIZE (imode) : 0);
3057
a3b104d2 3058 /* If the sub-object is at least word sized, then we know that subregging
3059 will work. This special case is important, since extract_bit_field
3060 wants to operate on integer modes, and there's rarely an OImode to
3061 correspond to TCmode. */
ccd5a3ef 3062 if (ibitsize >= BITS_PER_WORD
3063 /* For hard regs we have exact predicates. Assume we can split
3064 the original object if it spans an even number of hard regs.
3065 This special case is important for SCmode on 64-bit platforms
3066 where the natural size of floating-point regs is 32-bit. */
1c14a50e 3067 || (REG_P (cplx)
ccd5a3ef 3068 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
0933f1d9 3069 && REG_NREGS (cplx) % 2 == 0))
a3b104d2 3070 {
3071 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3072 imag_p ? GET_MODE_SIZE (imode) : 0);
ccd5a3ef 3073 if (ret)
3074 return ret;
3075 else
3076 /* simplify_gen_subreg may fail for sub-word MEMs. */
3077 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
a3b104d2 3078 }
3079
de17a47b 3080 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3f71db40 3081 true, NULL_RTX, imode, imode);
de17a47b 3082}
3083\f
f2ed60da 3084/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
df297520 3085 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
f2ed60da 3086 represented in NEW_MODE. If FORCE is true, this will never happen, as
3087 we'll force-create a SUBREG if needed. */
6442675c 3088
de17a47b 3089static rtx
3754d046 3090emit_move_change_mode (machine_mode new_mode,
3091 machine_mode old_mode, rtx x, bool force)
de17a47b 3092{
df297520 3093 rtx ret;
de17a47b 3094
2749a22e 3095 if (push_operand (x, GET_MODE (x)))
3096 {
3097 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3098 MEM_COPY_ATTRIBUTES (ret, x);
3099 }
3100 else if (MEM_P (x))
de17a47b 3101 {
d2121072 3102 /* We don't have to worry about changing the address since the
3103 size in bytes is supposed to be the same. */
3104 if (reload_in_progress)
3105 {
3106 /* Copy the MEM to change the mode and move any
3107 substitutions from the old MEM to the new one. */
3108 ret = adjust_address_nv (x, new_mode, 0);
3109 copy_replacements (x, ret);
3110 }
3111 else
3112 ret = adjust_address (x, new_mode, 0);
94580317 3113 }
de17a47b 3114 else
3115 {
0975351b 3116 /* Note that we do want simplify_subreg's behavior of validating
df297520 3117 that the new mode is ok for a hard register. If we were to use
3118 simplify_gen_subreg, we would create the subreg, but would
3119 probably run into the target not being able to implement it. */
f2ed60da 3120 /* Except, of course, when FORCE is true, when this is exactly what
3121 we want. Which is needed for CCmodes on some targets. */
3122 if (force)
3123 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3124 else
3125 ret = simplify_subreg (new_mode, x, old_mode, 0);
de17a47b 3126 }
10f307d9 3127
df297520 3128 return ret;
3129}
3130
de17a47b 3131/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3132 an integer mode of the same size as MODE. Returns the instruction
3133 emitted, or NULL if such a move could not be generated. */
10f307d9 3134
c81fd430 3135static rtx_insn *
3754d046 3136emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
de17a47b 3137{
3754d046 3138 machine_mode imode;
de17a47b 3139 enum insn_code code;
10f307d9 3140
de17a47b 3141 /* There must exist a mode of the exact size we require. */
3142 imode = int_mode_for_mode (mode);
3143 if (imode == BLKmode)
c81fd430 3144 return NULL;
94580317 3145
de17a47b 3146 /* The target must support moves in this mode. */
d6bf3b14 3147 code = optab_handler (mov_optab, imode);
de17a47b 3148 if (code == CODE_FOR_nothing)
c81fd430 3149 return NULL;
94580317 3150
80e467e2 3151 x = emit_move_change_mode (imode, mode, x, force);
f2ed60da 3152 if (x == NULL_RTX)
c81fd430 3153 return NULL;
80e467e2 3154 y = emit_move_change_mode (imode, mode, y, force);
f2ed60da 3155 if (y == NULL_RTX)
c81fd430 3156 return NULL;
f2ed60da 3157 return emit_insn (GEN_FCN (code) (x, y));
aaad03e5 3158}
3159
de17a47b 3160/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3161 Return an equivalent MEM that does not use an auto-increment. */
aaad03e5 3162
dc7cdd37 3163rtx
3754d046 3164emit_move_resolve_push (machine_mode mode, rtx x)
aaad03e5 3165{
de17a47b 3166 enum rtx_code code = GET_CODE (XEXP (x, 0));
3167 HOST_WIDE_INT adjust;
3168 rtx temp;
aaad03e5 3169
de17a47b 3170 adjust = GET_MODE_SIZE (mode);
3171#ifdef PUSH_ROUNDING
3172 adjust = PUSH_ROUNDING (adjust);
3173#endif
3174 if (code == PRE_DEC || code == POST_DEC)
3175 adjust = -adjust;
3cb7a129 3176 else if (code == PRE_MODIFY || code == POST_MODIFY)
3177 {
3178 rtx expr = XEXP (XEXP (x, 0), 1);
3179 HOST_WIDE_INT val;
3180
3181 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
971ba038 3182 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3cb7a129 3183 val = INTVAL (XEXP (expr, 1));
3184 if (GET_CODE (expr) == MINUS)
3185 val = -val;
3186 gcc_assert (adjust == val || adjust == -val);
3187 adjust = val;
3188 }
1203f673 3189
de17a47b 3190 /* Do not use anti_adjust_stack, since we don't want to update
3191 stack_pointer_delta. */
3192 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
0359f9f5 3193 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
de17a47b 3194 0, OPTAB_LIB_WIDEN);
3195 if (temp != stack_pointer_rtx)
3196 emit_move_insn (stack_pointer_rtx, temp);
10f307d9 3197
de17a47b 3198 switch (code)
b63679d2 3199 {
de17a47b 3200 case PRE_INC:
3201 case PRE_DEC:
3cb7a129 3202 case PRE_MODIFY:
de17a47b 3203 temp = stack_pointer_rtx;
3204 break;
3205 case POST_INC:
de17a47b 3206 case POST_DEC:
3cb7a129 3207 case POST_MODIFY:
29c05e22 3208 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
de17a47b 3209 break;
3210 default:
3211 gcc_unreachable ();
3212 }
b63679d2 3213
de17a47b 3214 return replace_equiv_address (x, temp);
3215}
a8d8b962 3216
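/* Usage sketch, assuming a downward-growing stack: a 4-byte push

     (mem:SI (pre_dec:P (reg sp)))

   becomes an explicit stack-pointer decrement followed by a plain store
   through (mem:SI (reg sp)).  Hypothetical, compiled out.  */
#if 0
static rtx
example_resolve_si_push (rtx push_mem)
{
  gcc_assert (push_operand (push_mem, SImode));
  return emit_move_resolve_push (SImode, push_mem);
}
#endif
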
de17a47b 3217/* A subroutine of emit_move_complex. Generate a move from Y into X.
3218 X is known to satisfy push_operand, and MODE is known to be complex.
3219 Returns the last instruction emitted. */
76ab50f8 3220
c81fd430 3221rtx_insn *
3754d046 3222emit_move_complex_push (machine_mode mode, rtx x, rtx y)
de17a47b 3223{
3754d046 3224 machine_mode submode = GET_MODE_INNER (mode);
de17a47b 3225 bool imag_first;
76ab50f8 3226
de17a47b 3227#ifdef PUSH_ROUNDING
3228 unsigned int submodesize = GET_MODE_SIZE (submode);
76ab50f8 3229
de17a47b 3230	 /* If we push to the stack, but the size is smaller than what the
3231	 machine can push exactly, we must use move instructions instead. */
3232 if (PUSH_ROUNDING (submodesize) != submodesize)
3233 {
3234 x = emit_move_resolve_push (mode, x);
3235 return emit_move_insn (x, y);
3236 }
4ed008e7 3237#endif
b63679d2 3238
de17a47b 3239 /* Note that the real part always precedes the imag part in memory
3240	 regardless of the machine's endianness. */
3241 switch (GET_CODE (XEXP (x, 0)))
3242 {
3243 case PRE_DEC:
3244 case POST_DEC:
3245 imag_first = true;
3246 break;
3247 case PRE_INC:
3248 case POST_INC:
3249 imag_first = false;
3250 break;
3251 default:
3252 gcc_unreachable ();
3253 }
2166bbaa 3254
de17a47b 3255 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3256 read_complex_part (y, imag_first));
3257 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3258 read_complex_part (y, !imag_first));
3259}
5b5abf88 3260
64003036 3261/* A subroutine of emit_move_complex. Perform the move from Y to X
3262 via two moves of the parts. Returns the last instruction emitted. */
3263
c81fd430 3264rtx_insn *
64003036 3265emit_move_complex_parts (rtx x, rtx y)
3266{
3267 /* Show the output dies here. This is necessary for SUBREGs
3268 of pseudos since we cannot track their lifetimes correctly;
3269 hard regs shouldn't appear here except as return values. */
3270 if (!reload_completed && !reload_in_progress
3271 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
18b42941 3272 emit_clobber (x);
64003036 3273
3274 write_complex_part (x, read_complex_part (y, false), false);
3275 write_complex_part (x, read_complex_part (y, true), true);
3276
3277 return get_last_insn ();
3278}
3279
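/* A sketch of the part-by-part copy above for two hypothetical SCmode
   pseudos (compiled out).  */
#if 0
static void
example_copy_sc_by_parts (rtx dst, rtx src)
{
  write_complex_part (dst, read_complex_part (src, false), false); /* real */
  write_complex_part (dst, read_complex_part (src, true), true);   /* imag */
}
#endif
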
de17a47b 3280/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3281 MODE is known to be complex. Returns the last instruction emitted. */
2166bbaa 3282
c81fd430 3283static rtx_insn *
3754d046 3284emit_move_complex (machine_mode mode, rtx x, rtx y)
de17a47b 3285{
3286 bool try_int;
5b5abf88 3287
de17a47b 3288	 /* We need to take special care for pushes, to maintain proper ordering
3289	 of the data and to handle any extra padding. */
3290 if (push_operand (x, mode))
3291 return emit_move_complex_push (mode, x, y);
b63679d2 3292
493bce58 3293 /* See if we can coerce the target into moving both values at once, except
3294 for floating point where we favor moving as parts if this is easy. */
5720e0a5 3295 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
493bce58 3296 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3297 && !(REG_P (x)
3298 && HARD_REGISTER_P (x)
0933f1d9 3299 && REG_NREGS (x) == 1)
493bce58 3300 && !(REG_P (y)
3301 && HARD_REGISTER_P (y)
0933f1d9 3302 && REG_NREGS (y) == 1))
5720e0a5 3303 try_int = false;
de17a47b 3304	 /* This is not possible if the values are inherently not adjacent. */
5720e0a5 3305 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
de17a47b 3306 try_int = false;
3307	 /* It is possible if both are registers (or subregs of registers). */
3308 else if (register_operand (x, mode) && register_operand (y, mode))
3309 try_int = true;
3310 /* If one of the operands is a memory, and alignment constraints
3311 are friendly enough, we may be able to do combined memory operations.
3312 We do not attempt this if Y is a constant because that combination is
3313 usually better with the by-parts thing below. */
3314 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3315 && (!STRICT_ALIGNMENT
3316 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3317 try_int = true;
3318 else
3319 try_int = false;
3320
3321 if (try_int)
8d94ba7c 3322 {
c81fd430 3323 rtx_insn *ret;
5720e0a5 3324
3325 /* For memory to memory moves, optimal behavior can be had with the
3326 existing block move logic. */
3327 if (MEM_P (x) && MEM_P (y))
3328 {
3329 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3330 BLOCK_OP_NO_LIBCALL);
3331 return get_last_insn ();
3332 }
3333
80e467e2 3334 ret = emit_move_via_integer (mode, x, y, true);
de17a47b 3335 if (ret)
3336 return ret;
3337 }
8d94ba7c 3338
64003036 3339 return emit_move_complex_parts (x, y);
de17a47b 3340}
8d94ba7c 3341
de17a47b 3342/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3343 MODE is known to be MODE_CC. Returns the last instruction emitted. */
8d94ba7c 3344
c81fd430 3345static rtx_insn *
3754d046 3346emit_move_ccmode (machine_mode mode, rtx x, rtx y)
de17a47b 3347{
c81fd430 3348 rtx_insn *ret;
8d94ba7c 3349
de17a47b 3350 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3351 if (mode != CCmode)
3352 {
d6bf3b14 3353 enum insn_code code = optab_handler (mov_optab, CCmode);
de17a47b 3354 if (code != CODE_FOR_nothing)
f2ed60da 3355 {
3356 x = emit_move_change_mode (CCmode, mode, x, true);
3357 y = emit_move_change_mode (CCmode, mode, y, true);
3358 return emit_insn (GEN_FCN (code) (x, y));
3359 }
de17a47b 3360 }
3361
3362 /* Otherwise, find the MODE_INT mode of the same width. */
80e467e2 3363 ret = emit_move_via_integer (mode, x, y, false);
de17a47b 3364 gcc_assert (ret != NULL);
3365 return ret;
3366}
3367
8dfa1b7f 3368/* Return true if word I of OP lies entirely in the
3369 undefined bits of a paradoxical subreg. */
3370
3371static bool
1f1872fd 3372undefined_operand_subword_p (const_rtx op, int i)
8dfa1b7f 3373{
3754d046 3374 machine_mode innermode, innermostmode;
8dfa1b7f 3375 int offset;
3376 if (GET_CODE (op) != SUBREG)
3377 return false;
3378 innermode = GET_MODE (op);
3379 innermostmode = GET_MODE (SUBREG_REG (op));
3380 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3381 /* The SUBREG_BYTE represents offset, as if the value were stored in
3382 memory, except for a paradoxical subreg where we define
3383 SUBREG_BYTE to be 0; undo this exception as in
3384 simplify_subreg. */
3385 if (SUBREG_BYTE (op) == 0
3386 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3387 {
3388 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3389 if (WORDS_BIG_ENDIAN)
3390 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3391 if (BYTES_BIG_ENDIAN)
3392 offset += difference % UNITS_PER_WORD;
3393 }
3394 if (offset >= GET_MODE_SIZE (innermostmode)
3395 || offset <= -GET_MODE_SIZE (word_mode))
3396 return true;
3397 return false;
3398}
3399
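/* A sketch, assuming a 32-bit !WORDS_BIG_ENDIAN target: in the
   paradoxical subreg (subreg:DI (reg:SI x) 0), word 1 lies entirely in
   the undefined upper half, so the predicate above returns true for
   I == 1.  Hypothetical, compiled out.  */
#if 0
static bool
example_high_word_is_undefined (rtx si_reg)
{
  rtx paradoxical = gen_rtx_SUBREG (DImode, si_reg, 0);
  return undefined_operand_subword_p (paradoxical, 1);
}
#endif
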
de17a47b 3400/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3401 MODE is any multi-word or full-word mode that lacks a move_insn
3402 pattern. Note that you will get better code if you define such
3403 patterns, even if they must turn into multiple assembler instructions. */
3404
c81fd430 3405static rtx_insn *
3754d046 3406emit_move_multi_word (machine_mode mode, rtx x, rtx y)
de17a47b 3407{
c81fd430 3408 rtx_insn *last_insn = 0;
3409 rtx_insn *seq;
3410 rtx inner;
de17a47b 3411 bool need_clobber;
3412 int i;
1f8b6002 3413
de17a47b 3414 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
1f8b6002 3415
de17a47b 3416 /* If X is a push on the stack, do the push now and replace
3417 X with a reference to the stack pointer. */
3418 if (push_operand (x, mode))
3419 x = emit_move_resolve_push (mode, x);
3420
3421 /* If we are in reload, see if either operand is a MEM whose address
3422 is scheduled for replacement. */
3423 if (reload_in_progress && MEM_P (x)
3424 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3425 x = replace_equiv_address_nv (x, inner);
3426 if (reload_in_progress && MEM_P (y)
3427 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3428 y = replace_equiv_address_nv (y, inner);
3429
3430 start_sequence ();
3431
3432 need_clobber = false;
3433 for (i = 0;
3434 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3435 i++)
3436 {
3437 rtx xpart = operand_subword (x, i, 1, mode);
8dfa1b7f 3438 rtx ypart;
3439
3440 /* Do not generate code for a move if it would come entirely
3441 from the undefined bits of a paradoxical subreg. */
3442 if (undefined_operand_subword_p (y, i))
3443 continue;
3444
3445 ypart = operand_subword (y, i, 1, mode);
de17a47b 3446
3447 /* If we can't get a part of Y, put Y into memory if it is a
89f18f73 3448 constant. Otherwise, force it into a register. Then we must
3449 be able to get a part of Y. */
de17a47b 3450 if (ypart == 0 && CONSTANT_P (y))
8d94ba7c 3451 {
f2d0e9f1 3452 y = use_anchored_address (force_const_mem (mode, y));
de17a47b 3453 ypart = operand_subword (y, i, 1, mode);
8d94ba7c 3454 }
de17a47b 3455 else if (ypart == 0)
3456 ypart = operand_subword_force (y, i, mode);
3457
3458 gcc_assert (xpart && ypart);
3459
3460 need_clobber |= (GET_CODE (xpart) == SUBREG);
35cb5232 3461
de17a47b 3462 last_insn = emit_move_insn (xpart, ypart);
8d94ba7c 3463 }
3464
de17a47b 3465 seq = get_insns ();
3466 end_sequence ();
3467
3468 /* Show the output dies here. This is necessary for SUBREGs
3469 of pseudos since we cannot track their lifetimes correctly;
3470 hard regs shouldn't appear here except as return values.
3471 We never want to emit such a clobber after reload. */
3472 if (x != y
3473 && ! (reload_in_progress || reload_completed)
3474 && need_clobber != 0)
18b42941 3475 emit_clobber (x);
de17a47b 3476
3477 emit_insn (seq);
3478
3479 return last_insn;
3480}
3481
3482/* Low level part of emit_move_insn.
3483 Called just like emit_move_insn, but assumes X and Y
3484 are basically valid. */
3485
c81fd430 3486rtx_insn *
de17a47b 3487emit_move_insn_1 (rtx x, rtx y)
3488{
3754d046 3489 machine_mode mode = GET_MODE (x);
de17a47b 3490 enum insn_code code;
3491
3492 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3493
d6bf3b14 3494 code = optab_handler (mov_optab, mode);
de17a47b 3495 if (code != CODE_FOR_nothing)
3496 return emit_insn (GEN_FCN (code) (x, y));
3497
3498 /* Expand complex moves by moving real part and imag part. */
3499 if (COMPLEX_MODE_P (mode))
3500 return emit_move_complex (mode, x, y);
3501
68a556d6 3502 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3503 || ALL_FIXED_POINT_MODE_P (mode))
d2121072 3504 {
c81fd430 3505 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
d2121072 3506
3507 /* If we can't find an integer mode, use multi words. */
3508 if (result)
3509 return result;
3510 else
3511 return emit_move_multi_word (mode, x, y);
3512 }
3513
de17a47b 3514 if (GET_MODE_CLASS (mode) == MODE_CC)
3515 return emit_move_ccmode (mode, x, y);
3516
7be9cf34 3517 /* Try using a move pattern for the corresponding integer mode. This is
3518 only safe when simplify_subreg can convert MODE constants into integer
3519 constants. At present, it can only do this reliably if the value
3520 fits within a HOST_WIDE_INT. */
de17a47b 3521 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10f307d9 3522 {
c81fd430 3523 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
c6a6cdaa 3524
de17a47b 3525 if (ret)
c6a6cdaa 3526 {
3527 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3528 return ret;
3529 }
de17a47b 3530 }
ff385626 3531
de17a47b 3532 return emit_move_multi_word (mode, x, y);
3533}
07c143fb 3534
de17a47b 3535/* Generate code to copy Y into X.
3536 Both Y and X must have the same mode, except that
3537 Y can be a constant with VOIDmode.
3538 This mode cannot be BLKmode; use emit_block_move for that.
fa56dc1d 3539
de17a47b 3540 Return the last instruction emitted. */
6702c250 3541
c81fd430 3542rtx_insn *
de17a47b 3543emit_move_insn (rtx x, rtx y)
3544{
3754d046 3545 machine_mode mode = GET_MODE (x);
de17a47b 3546 rtx y_cst = NULL_RTX;
c81fd430 3547 rtx_insn *last_insn;
3548 rtx set;
9cb64ebc 3549
de17a47b 3550 gcc_assert (mode != BLKmode
3551 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
10f307d9 3552
de17a47b 3553 if (CONSTANT_P (y))
3554 {
3555 if (optimize
3556 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3557 && (last_insn = compress_float_constant (x, y)))
3558 return last_insn;
10f307d9 3559
de17a47b 3560 y_cst = y;
10f307d9 3561
ca316360 3562 if (!targetm.legitimate_constant_p (mode, y))
de17a47b 3563 {
3564 y = force_const_mem (mode, y);
7f964718 3565
de17a47b 3566 /* If the target's cannot_force_const_mem prevented the spill,
3567 assume that the target's move expanders will also take care
3568 of the non-legitimate constant. */
3569 if (!y)
3570 y = y_cst;
f2d0e9f1 3571 else
3572 y = use_anchored_address (y);
10f307d9 3573 }
de17a47b 3574 }
dd0d17cd 3575
de17a47b 3576 /* If X or Y are memory references, verify that their addresses are valid
3577 for the machine. */
3578 if (MEM_P (x)
bd1a81f7 3579 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3580 MEM_ADDR_SPACE (x))
4d25f9eb 3581 && ! push_operand (x, GET_MODE (x))))
de17a47b 3582 x = validize_mem (x);
7f964718 3583
de17a47b 3584 if (MEM_P (y)
bd1a81f7 3585 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3586 MEM_ADDR_SPACE (y)))
de17a47b 3587 y = validize_mem (y);
7f964718 3588
de17a47b 3589 gcc_assert (mode != BLKmode);
7f964718 3590
de17a47b 3591 last_insn = emit_move_insn_1 (x, y);
3592
3593 if (y_cst && REG_P (x)
3594 && (set = single_set (last_insn)) != NULL_RTX
3595 && SET_DEST (set) == x
3596 && ! rtx_equal_p (y_cst, SET_SRC (set)))
722c0f6e 3597 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
de17a47b 3598
3599 return last_insn;
10f307d9 3600}
c0c4a46d 3601
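/* Usage sketch: emit_move_insn is the canonical way to copy a value
   during expansion; here a constant is loaded into a fresh SImode
   pseudo (hypothetical, compiled out).  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, gen_int_mode (42, SImode));
  return reg;
}
#endif
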
34517c64 3602/* Generate the body of an instruction to copy Y into X.
3603 It may be a list of insns, if one insn isn't enough. */
3604
f9a00e9e 3605rtx_insn *
34517c64 3606gen_move_insn (rtx x, rtx y)
3607{
3608 rtx_insn *seq;
3609
3610 start_sequence ();
3611 emit_move_insn_1 (x, y);
3612 seq = get_insns ();
3613 end_sequence ();
3614 return seq;
3615}
3616
c0c4a46d 3617/* If Y is representable exactly in a narrower mode, and the target can
3618 perform the extension directly from constant or memory, then emit the
3619 move as an extension. */
3620
c81fd430 3621static rtx_insn *
35cb5232 3622compress_float_constant (rtx x, rtx y)
c0c4a46d 3623{
3754d046 3624 machine_mode dstmode = GET_MODE (x);
3625 machine_mode orig_srcmode = GET_MODE (y);
3626 machine_mode srcmode;
c0c4a46d 3627 REAL_VALUE_TYPE r;
8b1bf1e9 3628 int oldcost, newcost;
f529eb25 3629 bool speed = optimize_insn_for_speed_p ();
c0c4a46d 3630
3631 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3632
ca316360 3633 if (targetm.legitimate_constant_p (dstmode, y))
7013e87c 3634 oldcost = set_src_cost (y, speed);
8b1bf1e9 3635 else
7013e87c 3636 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
8b1bf1e9 3637
c0c4a46d 3638 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3639 srcmode != orig_srcmode;
3640 srcmode = GET_MODE_WIDER_MODE (srcmode))
3641 {
3642 enum insn_code ic;
c81fd430 3643 rtx trunc_y;
3644 rtx_insn *last_insn;
c0c4a46d 3645
3646 /* Skip if the target can't extend this way. */
3647 ic = can_extend_p (dstmode, srcmode, 0);
3648 if (ic == CODE_FOR_nothing)
3649 continue;
3650
3651 /* Skip if the narrowed value isn't exact. */
3652 if (! exact_real_truncate (srcmode, &r))
3653 continue;
3654
3655 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3656
ca316360 3657 if (targetm.legitimate_constant_p (srcmode, trunc_y))
c0c4a46d 3658 {
3659 /* Skip if the target needs extra instructions to perform
3660 the extension. */
39c56a89 3661 if (!insn_operand_matches (ic, 1, trunc_y))
c0c4a46d 3662 continue;
8b1bf1e9 3663 /* This is valid, but may not be cheaper than the original. */
7013e87c 3664 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3665 speed);
8b1bf1e9 3666 if (oldcost < newcost)
3667 continue;
c0c4a46d 3668 }
3669 else if (float_extend_from_mem[dstmode][srcmode])
8b1bf1e9 3670 {
3671 trunc_y = force_const_mem (srcmode, trunc_y);
3672 /* This is valid, but may not be cheaper than the original. */
7013e87c 3673 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3674 speed);
8b1bf1e9 3675 if (oldcost < newcost)
3676 continue;
3677 trunc_y = validize_mem (trunc_y);
3678 }
c0c4a46d 3679 else
3680 continue;
3220d3c5 3681
3682 /* For CSE's benefit, force the compressed constant pool entry
3683 into a new pseudo. This constant may be used in different modes,
3684 and if not, combine will put things back together for us. */
3685 trunc_y = force_reg (srcmode, trunc_y);
0614d12c 3686
3687 /* If x is a hard register, perform the extension into a pseudo,
3688 so that e.g. stack realignment code is aware of it. */
3689 rtx target = x;
3690 if (REG_P (x) && HARD_REGISTER_P (x))
3691 target = gen_reg_rtx (dstmode);
3692
3693 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
c0c4a46d 3694 last_insn = get_last_insn ();
3695
0614d12c 3696 if (REG_P (target))
6442675c 3697 set_unique_reg_note (last_insn, REG_EQUAL, y);
c0c4a46d 3698
0614d12c 3699 if (target != x)
3700 return emit_move_insn (x, target);
c0c4a46d 3701 return last_insn;
3702 }
3703
c81fd430 3704 return NULL;
c0c4a46d 3705}
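
/* A sketch, assuming a target with a mem-to-reg extendsfdf2: if Y is a
   DFmode constant such as 1.0 that is exact in SFmode, the code above
   emits

     (set (reg:DF x) (float_extend:DF (mem:SF <pool entry for 1.0f>)))

   rather than loading a wider DFmode pool entry.  Compiled out.  */
#if 0
static rtx_insn *
example_compress (rtx df_reg, rtx df_const)
{
  return compress_float_constant (df_reg, df_const);
}
#endif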
10f307d9 3706\f
3707/* Pushing data onto the stack. */
3708
3709/* Push a block of length SIZE (perhaps variable)
3710 and return an rtx to address the beginning of the block.
10f307d9 3711 The value may be virtual_outgoing_args_rtx.
3712
3713 EXTRA is the number of bytes of padding to push in addition to SIZE.
3714 BELOW nonzero means this padding comes at low addresses;
3715 otherwise, the padding comes at high addresses. */
3716
3717rtx
35cb5232 3718push_block (rtx size, int extra, int below)
10f307d9 3719{
19cb6b50 3720 rtx temp;
ed8d3eee 3721
3722 size = convert_modes (Pmode, ptr_mode, size, 1);
10f307d9 3723 if (CONSTANT_P (size))
29c05e22 3724 anti_adjust_stack (plus_constant (Pmode, size, extra));
8ad4c111 3725 else if (REG_P (size) && extra == 0)
10f307d9 3726 anti_adjust_stack (size);
3727 else
3728 {
481feae3 3729 temp = copy_to_mode_reg (Pmode, size);
10f307d9 3730 if (extra != 0)
0359f9f5 3731 temp = expand_binop (Pmode, add_optab, temp,
3732 gen_int_mode (extra, Pmode),
10f307d9 3733 temp, 0, OPTAB_LIB_WIDEN);
3734 anti_adjust_stack (temp);
3735 }
3736
2b785411 3737 if (STACK_GROWS_DOWNWARD)
4448f543 3738 {
4448f543 3739 temp = virtual_outgoing_args_rtx;
3740 if (extra != 0 && below)
29c05e22 3741 temp = plus_constant (Pmode, temp, extra);
4448f543 3742 }
3743 else
3744 {
971ba038 3745 if (CONST_INT_P (size))
29c05e22 3746 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
fa56dc1d 3747 -INTVAL (size) - (below ? 0 : extra));
4448f543 3748 else if (extra != 0 && !below)
3749 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
29c05e22 3750 negate_rtx (Pmode, plus_constant (Pmode, size,
3751 extra)));
4448f543 3752 else
3753 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3754 negate_rtx (Pmode, size));
3755 }
10f307d9 3756
3757 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3758}
3759
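/* Usage sketch: reserve a 16-byte block with no extra padding and get
   an address for its start (hypothetical, compiled out).  */
#if 0
static rtx
example_push_16_bytes (void)
{
  return push_block (GEN_INT (16), 0, 0);
}
#endif
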
dfe00a8f 3760/* A utility routine that returns the base of an auto-inc memory, or NULL. */
3761
3762static rtx
3763mem_autoinc_base (rtx mem)
3764{
3765 if (MEM_P (mem))
3766 {
3767 rtx addr = XEXP (mem, 0);
3768 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3769 return XEXP (addr, 0);
3770 }
3771 return NULL;
3772}
3773
3774/* A utility routine used here, in reload, and in try_split. The insns
3775 after PREV up to and including LAST are known to adjust the stack,
3776 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3777 placing notes as appropriate. PREV may be NULL, indicating the
3778 entire insn sequence prior to LAST should be scanned.
3779
3780 The set of allowed stack pointer modifications is small:
3781 (1) One or more auto-inc style memory references (aka pushes),
3782 (2) One or more addition/subtraction with the SP as destination,
3783 (3) A single move insn with the SP as destination,
45152a7b 3784 (4) A call_pop insn,
3785 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
dfe00a8f 3786
45152a7b 3787 Insns in the sequence that do not modify the SP are ignored,
3788 except for noreturn calls.
dfe00a8f 3789
3790	 The return value is the amount of adjustment that can be trivially
3791	 verified, via immediate operand or auto-inc. If the adjustment cannot
3792	 be trivially extracted, fixup_args_size_notes returns INT_MIN, while
	 the single-insn helper find_args_size_adjust returns HOST_WIDE_INT_MIN. */
3793
40125f1c 3794HOST_WIDE_INT
50fc2d35 3795find_args_size_adjust (rtx_insn *insn)
dfe00a8f 3796{
40125f1c 3797 rtx dest, set, pat;
3798 int i;
dfe00a8f 3799
40125f1c 3800 pat = PATTERN (insn);
3801 set = NULL;
dfe00a8f 3802
40125f1c 3803 /* Look for a call_pop pattern. */
3804 if (CALL_P (insn))
3805 {
3806 /* We have to allow non-call_pop patterns for the case
3807 of emit_single_push_insn of a TLS address. */
3808 if (GET_CODE (pat) != PARALLEL)
3809 return 0;
dfe00a8f 3810
40125f1c 3811	 /* All call_pop patterns have a stack pointer adjust in the parallel.
3812 The call itself is always first, and the stack adjust is
3813 usually last, so search from the end. */
3814 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
dfe00a8f 3815 {
40125f1c 3816 set = XVECEXP (pat, 0, i);
3817 if (GET_CODE (set) != SET)
a36f1a95 3818 continue;
40125f1c 3819 dest = SET_DEST (set);
3820 if (dest == stack_pointer_rtx)
3821 break;
dfe00a8f 3822 }
40125f1c 3823 /* We'd better have found the stack pointer adjust. */
3824 if (i == 0)
3825 return 0;
3826 /* Fall through to process the extracted SET and DEST
3827 as if it was a standalone insn. */
3828 }
3829 else if (GET_CODE (pat) == SET)
3830 set = pat;
3831 else if ((set = single_set (insn)) != NULL)
3832 ;
3833 else if (GET_CODE (pat) == PARALLEL)
3834 {
3835 /* ??? Some older ports use a parallel with a stack adjust
3836 and a store for a PUSH_ROUNDING pattern, rather than a
3837 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3838 /* ??? See h8300 and m68k, pushqi1. */
3839 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
dfe00a8f 3840 {
40125f1c 3841 set = XVECEXP (pat, 0, i);
3842 if (GET_CODE (set) != SET)
dfe00a8f 3843 continue;
40125f1c 3844 dest = SET_DEST (set);
3845 if (dest == stack_pointer_rtx)
3846 break;
3847
3848 /* We do not expect an auto-inc of the sp in the parallel. */
3849 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3850 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3851 != stack_pointer_rtx);
dfe00a8f 3852 }
40125f1c 3853 if (i < 0)
3854 return 0;
3855 }
3856 else
3857 return 0;
3858
3859 dest = SET_DEST (set);
3860
3861 /* Look for direct modifications of the stack pointer. */
3862 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3863 {
3864 /* Look for a trivial adjustment, otherwise assume nothing. */
3865 /* Note that the SPU restore_stack_block pattern refers to
3866 the stack pointer in V4SImode. Consider that non-trivial. */
3867 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3868 && GET_CODE (SET_SRC (set)) == PLUS
3869 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3870 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3871 return INTVAL (XEXP (SET_SRC (set), 1));
3872 /* ??? Reload can generate no-op moves, which will be cleaned
3873 up later. Recognize it and continue searching. */
3874 else if (rtx_equal_p (dest, SET_SRC (set)))
3875 return 0;
dfe00a8f 3876 else
40125f1c 3877 return HOST_WIDE_INT_MIN;
3878 }
3879 else
3880 {
3881 rtx mem, addr;
dfe00a8f 3882
dfe00a8f 3883 /* Otherwise only think about autoinc patterns. */
40125f1c 3884 if (mem_autoinc_base (dest) == stack_pointer_rtx)
dfe00a8f 3885 {
40125f1c 3886 mem = dest;
3887 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3888 != stack_pointer_rtx);
dfe00a8f 3889 }
40125f1c 3890 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3891 mem = SET_SRC (set);
dfe00a8f 3892 else
40125f1c 3893 return 0;
3894
3895 addr = XEXP (mem, 0);
3896 switch (GET_CODE (addr))
3897 {
3898 case PRE_INC:
3899 case POST_INC:
3900 return GET_MODE_SIZE (GET_MODE (mem));
3901 case PRE_DEC:
3902 case POST_DEC:
3903 return -GET_MODE_SIZE (GET_MODE (mem));
3904 case PRE_MODIFY:
3905 case POST_MODIFY:
3906 addr = XEXP (addr, 1);
3907 gcc_assert (GET_CODE (addr) == PLUS);
3908 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3909 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3910 return INTVAL (XEXP (addr, 1));
3911 default:
3912 gcc_unreachable ();
3913 }
3914 }
3915}
3916
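/* A sketch: for a simple 4-byte push such as

     (set (mem:SI (pre_dec:P (reg sp))) (reg:SI x))

   the PRE_DEC case above yields -GET_MODE_SIZE (SImode), i.e. -4.
   Hypothetical, compiled out.  */
#if 0
static HOST_WIDE_INT
example_push_adjust (rtx_insn *push_insn)
{
  return find_args_size_adjust (push_insn); /* Expected: -4.  */
}
#endif
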
3917int
32f1a0c8 3918fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
40125f1c 3919{
3920 int args_size = end_args_size;
3921 bool saw_unknown = false;
4cd001d5 3922 rtx_insn *insn;
40125f1c 3923
3924 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3925 {
3926 HOST_WIDE_INT this_delta;
3927
3928 if (!NONDEBUG_INSN_P (insn))
dfe00a8f 3929 continue;
3930
40125f1c 3931 this_delta = find_args_size_adjust (insn);
3932 if (this_delta == 0)
45152a7b 3933 {
3934 if (!CALL_P (insn)
3935 || ACCUMULATE_OUTGOING_ARGS
3936 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3937 continue;
3938 }
40125f1c 3939
3940 gcc_assert (!saw_unknown);
3941 if (this_delta == HOST_WIDE_INT_MIN)
3942 saw_unknown = true;
3943
dfe00a8f 3944 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3764c94e 3945 if (STACK_GROWS_DOWNWARD)
3946 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3947
dfe00a8f 3948 args_size -= this_delta;
3949 }
3950
3951 return saw_unknown ? INT_MIN : args_size;
3952}
fad4a30c 3953
dfe00a8f 3954#ifdef PUSH_ROUNDING
ef7dc4b4 3955/* Emit single push insn. */
fad4a30c 3956
ef7dc4b4 3957static void
3754d046 3958emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
ef7dc4b4 3959{
ef7dc4b4 3960 rtx dest_addr;
07c143fb 3961 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
ef7dc4b4 3962 rtx dest;
675b92cc 3963 enum insn_code icode;
ef7dc4b4 3964
675b92cc 3965 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3966	 /* If there is a push pattern, use it. Otherwise fall back to the old
3967	 way of handing a MEM representing the push operation to the move expander. */
d6bf3b14 3968 icode = optab_handler (push_optab, mode);
675b92cc 3969 if (icode != CODE_FOR_nothing)
3970 {
8786db1e 3971 struct expand_operand ops[1];
3972
3973 create_input_operand (&ops[0], x, mode);
3974 if (maybe_expand_insn (icode, 1, ops))
3975 return;
675b92cc 3976 }
ef7dc4b4 3977 if (GET_MODE_SIZE (mode) == rounded_size)
3978 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
20e1fca5 3979 /* If we are to pad downward, adjust the stack pointer first and
3980 then store X into the stack location using an offset. This is
3981 because emit_move_insn does not know how to pad; it does not have
3982	 access to the type. */
3983 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3984 {
3985 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3986 HOST_WIDE_INT offset;
3987
3988 emit_move_insn (stack_pointer_rtx,
3989 expand_binop (Pmode,
3764c94e 3990 STACK_GROWS_DOWNWARD ? sub_optab
3991 : add_optab,
20e1fca5 3992 stack_pointer_rtx,
0359f9f5 3993 gen_int_mode (rounded_size, Pmode),
20e1fca5 3994 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3995
3996 offset = (HOST_WIDE_INT) padding_size;
3764c94e 3997 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
20e1fca5 3998 /* We have already decremented the stack pointer, so get the
3999 previous value. */
4000 offset += (HOST_WIDE_INT) rounded_size;
3764c94e 4001
4002 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
20e1fca5 4003 /* We have already incremented the stack pointer, so get the
4004 previous value. */
4005 offset -= (HOST_WIDE_INT) rounded_size;
3764c94e 4006
c338f2e3 4007 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4008 gen_int_mode (offset, Pmode));
20e1fca5 4009 }
ef7dc4b4 4010 else
4011 {
3764c94e 4012 if (STACK_GROWS_DOWNWARD)
4013 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4014 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4015 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4016 Pmode));
4017 else
4018 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4019 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4020 gen_int_mode (rounded_size, Pmode));
4021
ef7dc4b4 4022 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4023 }
4024
4025 dest = gen_rtx_MEM (mode, dest_addr);
4026
ef7dc4b4 4027 if (type != 0)
4028 {
4029 set_mem_attributes (dest, type, 1);
a9d9ab08 4030
dc7cdd37 4031 if (cfun->tail_call_marked)
a9d9ab08 4032 /* Function incoming arguments may overlap with sibling call
4033 outgoing arguments and we cannot allow reordering of reads
4034 from function arguments with stores to outgoing arguments
4035 of sibling calls. */
4036 set_mem_alias_set (dest, 0);
ef7dc4b4 4037 }
4038 emit_move_insn (dest, x);
ef7dc4b4 4039}
dfe00a8f 4040
4041/* Emit and annotate a single push insn. */
4042
4043static void
3754d046 4044emit_single_push_insn (machine_mode mode, rtx x, tree type)
dfe00a8f 4045{
4046 int delta, old_delta = stack_pointer_delta;
1d277a67 4047 rtx_insn *prev = get_last_insn ();
4048 rtx_insn *last;
dfe00a8f 4049
4050 emit_single_push_insn_1 (mode, x, type);
4051
4052 last = get_last_insn ();
4053
4054 /* Notice the common case where we emitted exactly one insn. */
4055 if (PREV_INSN (last) == prev)
4056 {
4057 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4058 return;
4059 }
4060
4061 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4062 gcc_assert (delta == INT_MIN || delta == old_delta);
4063}
fad4a30c 4064#endif
ef7dc4b4 4065
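/* A sketch of the annotation contract above, assuming PUSH_ROUNDING:
   in the common case where the push expands to a single insn, that insn
   carries a REG_ARGS_SIZE note recording the running
   stack_pointer_delta.  Hypothetical, compiled out.  */
#if 0
static void
example_push_and_check (rtx x)
{
  emit_single_push_insn (SImode, x, NULL_TREE);
  gcc_checking_assert (find_reg_note (get_last_insn (), REG_ARGS_SIZE,
				      NULL_RTX) != NULL_RTX);
}
#endif
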
a95e5776 4066/* If reading SIZE bytes from X will end up reading from
4067	 Y, return the number of bytes that overlap. Return -1
4068	 if there is no overlap, or -2 if we cannot determine this
4069	 (for example, when X and Y have different base registers). */
4070
4071static int
4072memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4073{
4074 rtx tmp = plus_constant (Pmode, x, size);
4075 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4076
4077 if (!CONST_INT_P (sub))
4078 return -2;
4079
4080 HOST_WIDE_INT val = INTVAL (sub);
4081
4082 return IN_RANGE (val, 1, size) ? val : -1;
4083}
4084
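/* Worked example: with X = sp + 4, Y = sp + 8 and SIZE = 8, TMP is
   sp + 12, SUB simplifies to 4, and 4 is in [1, 8], so the last four
   bytes read from X come from Y.  Hypothetical, compiled out.  */
#if 0
static int
example_overlap (void)
{
  rtx x = plus_constant (Pmode, stack_pointer_rtx, 4);
  rtx y = plus_constant (Pmode, stack_pointer_rtx, 8);
  return memory_load_overlap (x, y, 8); /* Expected: 4.  */
}
#endif
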
10f307d9 4085/* Generate code to push X onto the stack, assuming it has mode MODE and
4086 type TYPE.
4087 MODE is redundant except when X is a CONST_INT (since they don't
4088 carry mode info).
4089 SIZE is an rtx for the size of data to be copied (in bytes),
4090 needed only if X is BLKmode.
a95e5776 4091 Return true if successful. May return false if asked to push a
4092 partial argument during a sibcall optimization (as specified by
4093 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4094	 not to overlap.
10f307d9 4095
decd7a45 4096 ALIGN (in bits) is maximum alignment we can assume.
10f307d9 4097
a984cc1e 4098 If PARTIAL and REG are both nonzero, then copy that many of the first
f054eb3c 4099 bytes of X into registers starting with REG, and push the rest of X.
4100 The amount of space pushed is decreased by PARTIAL bytes.
10f307d9 4101 REG must be a hard register in this case.
a984cc1e 4102	 If REG is zero but PARTIAL is not, take all other actions for an
4103 argument partially in registers, but do not actually load any
4104 registers.
10f307d9 4105
4106 EXTRA is the amount in bytes of extra space to leave next to this arg.
4bbea254 4107 This is ignored if an argument block has already been allocated.
10f307d9 4108
4109 On a machine that lacks real push insns, ARGS_ADDR is the address of
4110 the bottom of the argument block for this call. We use indexing off there
4111	 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4112	 argument block has not been preallocated.
4113
997d68fe 4114 ARGS_SO_FAR is the size of args previously pushed for this call.
4115
4116 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4117 for arguments passed in registers. If nonzero, it will be the number
4118 of bytes required. */
10f307d9 4119
a95e5776 4120bool
3754d046 4121emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
35cb5232 4122 unsigned int align, int partial, rtx reg, int extra,
4123 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
a95e5776 4124 rtx alignment_pad, bool sibcall_p)
10f307d9 4125{
4126 rtx xinner;
3764c94e 4127 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
10f307d9 4128
4129 /* Decide where to pad the argument: `downward' for below,
4130 `upward' for above, or `none' for don't pad it.
4131 Default is below for small data on big-endian machines; else above. */
4132 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4133
ff385626 4134 /* Invert direction if stack is post-decrement.
12a97a04 4135 FIXME: why? */
4136 if (STACK_PUSH_CODE == POST_DEC)
10f307d9 4137 if (where_pad != none)
4138 where_pad = (where_pad == downward ? upward : downward);
4139
0a534ba7 4140 xinner = x;
10f307d9 4141
a95e5776 4142 int nregs = partial / UNITS_PER_WORD;
4143 rtx *tmp_regs = NULL;
4144 int overlapping = 0;
4145
851fc2b3 4146 if (mode == BLKmode
4147 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
10f307d9 4148 {
4149 /* Copy a block into the stack, entirely or partially. */
4150
19cb6b50 4151 rtx temp;
f054eb3c 4152 int used;
a2509aaa 4153 int offset;
10f307d9 4154 int skip;
fa56dc1d 4155
f054eb3c 4156 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4157 used = partial - offset;
a2509aaa 4158
851fc2b3 4159 if (mode != BLKmode)
4160 {
4161 /* A value is to be stored in an insufficiently aligned
4162 stack slot; copy via a suitably aligned slot if
4163 necessary. */
4164 size = GEN_INT (GET_MODE_SIZE (mode));
4165 if (!MEM_P (xinner))
4166 {
0ab48139 4167 temp = assign_temp (type, 1, 1);
851fc2b3 4168 emit_move_insn (temp, xinner);
4169 xinner = temp;
4170 }
4171 }
4172
611234b4 4173 gcc_assert (size);
10f307d9 4174
10f307d9 4175 /* USED is now the # of bytes we need not copy to the stack
4176 because registers will take care of them. */
4177
4178 if (partial != 0)
e513d163 4179 xinner = adjust_address (xinner, BLKmode, used);
10f307d9 4180
4181 /* If the partial register-part of the arg counts in its stack size,
4182 skip the part of stack space corresponding to the registers.
4183 Otherwise, start copying to the beginning of the stack space,
4184 by setting SKIP to 0. */
997d68fe 4185 skip = (reg_parm_stack_space == 0) ? 0 : used;
10f307d9 4186
4187#ifdef PUSH_ROUNDING
4188 /* Do it with several push insns if that doesn't take lots of insns
4189 and if there is no difficulty with push insns that skip bytes
4190 on the stack for alignment purposes. */
4191 if (args_addr == 0
4448f543 4192 && PUSH_ARGS
971ba038 4193 && CONST_INT_P (size)
10f307d9 4194 && skip == 0
b4ad0ea6 4195 && MEM_ALIGN (xinner) >= align
d4bd0e64 4196 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
10f307d9 4197	 /* Here we avoid the case of a structure whose weak alignment
4198	 forces many pushes of small amounts of data, where the rounding
4199	 done by such small pushes causes trouble. */
9439ebf7 4200 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
325d1c45 4201 || align >= BIGGEST_ALIGNMENT
decd7a45 4202 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4203 == (align / BITS_PER_UNIT)))
db5b2472 4204 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
10f307d9 4205 {
4206 /* Push padding now if padding above and stack grows down,
4207 or if padding below and stack grows up.
4208 But if space already allocated, this has already been done. */
4209 if (extra && args_addr == 0
4210 && where_pad != none && where_pad != stack_direction)
b572011e 4211 anti_adjust_stack (GEN_INT (extra));
10f307d9 4212
9fe0e1b8 4213 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
10f307d9 4214 }
4215 else
fa56dc1d 4216#endif /* PUSH_ROUNDING */
10f307d9 4217 {
a9f2963b 4218 rtx target;
4219
10f307d9 4220 /* Otherwise make space on the stack and copy the data
4221 to the address of that space. */
4222
4223 /* Deduct words put into registers from the size we must copy. */
4224 if (partial != 0)
4225 {
971ba038 4226 if (CONST_INT_P (size))
b572011e 4227 size = GEN_INT (INTVAL (size) - used);
10f307d9 4228 else
4229 size = expand_binop (GET_MODE (size), sub_optab, size,
0359f9f5 4230 gen_int_mode (used, GET_MODE (size)),
4231 NULL_RTX, 0, OPTAB_LIB_WIDEN);
10f307d9 4232 }
4233
4234 /* Get the address of the stack space.
4235 In this case, we do not deal with EXTRA separately.
4236 A single stack adjust will do. */
4237 if (! args_addr)
4238 {
4239 temp = push_block (size, extra, where_pad == downward);
4240 extra = 0;
4241 }
971ba038 4242 else if (CONST_INT_P (args_so_far))
10f307d9 4243 temp = memory_address (BLKmode,
29c05e22 4244 plus_constant (Pmode, args_addr,
10f307d9 4245 skip + INTVAL (args_so_far)));
4246 else
4247 temp = memory_address (BLKmode,
29c05e22 4248 plus_constant (Pmode,
4249 gen_rtx_PLUS (Pmode,
941522d6 4250 args_addr,
4251 args_so_far),
10f307d9 4252 skip));
c0bfc78e 4253
4254 if (!ACCUMULATE_OUTGOING_ARGS)
4255 {
4256 /* If the source is referenced relative to the stack pointer,
4257 copy it to another register to stabilize it. We do not need
4258 to do this if we know that we won't be changing sp. */
4259
4260 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4261 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4262 temp = copy_to_reg (temp);
4263 }
4264
fa56dc1d 4265 target = gen_rtx_MEM (BLKmode, temp);
a9f2963b 4266
f7db692b 4267 /* We do *not* set_mem_attributes here, because incoming arguments
4268 may overlap with sibling call outgoing arguments and we cannot
4269 allow reordering of reads from function arguments with stores
4270 to outgoing arguments of sibling calls. We do, however, want
4271 to record the alignment of the stack slot. */
0378dbdc 4272 /* ALIGN may well be better aligned than TYPE, e.g. due to
4273 PARM_BOUNDARY. Assume the caller isn't lying. */
4274 set_mem_align (target, align);
c0bfc78e 4275
a95e5776 4276 /* If part should go in registers and pushing to that part would
4277 overwrite some of the values that need to go into regs, load the
4278 overlapping values into temporary pseudos to be moved into the hard
4279 regs at the end after the stack pushing has completed.
4280 We cannot load them directly into the hard regs here because
4281 they can be clobbered by the block move expansions.
4282 See PR 65358. */
4283
4284 if (partial > 0 && reg != 0 && mode == BLKmode
4285 && GET_CODE (reg) != PARALLEL)
4286 {
4287 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4288 if (overlapping > 0)
4289 {
4290 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4291 overlapping /= UNITS_PER_WORD;
4292
4293 tmp_regs = XALLOCAVEC (rtx, overlapping);
4294
4295 for (int i = 0; i < overlapping; i++)
4296 tmp_regs[i] = gen_reg_rtx (word_mode);
4297
4298 for (int i = 0; i < overlapping; i++)
4299 emit_move_insn (tmp_regs[i],
4300 operand_subword_force (target, i, mode));
4301 }
4302 else if (overlapping == -1)
4303 overlapping = 0;
4304 /* Could not determine whether there is overlap.
4305 Fail the sibcall. */
4306 else
4307 {
4308 overlapping = 0;
4309 if (sibcall_p)
4310 return false;
4311 }
4312 }
0378dbdc 4313 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
10f307d9 4314 }
4315 }
4316 else if (partial > 0)
4317 {
4318 /* Scalar partly in registers. */
4319
4320 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4321 int i;
4322 int not_stack;
f054eb3c 4323 /* # bytes of start of argument
10f307d9 4324 that we must make space for but need not store. */
f0cf03cb 4325 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
10f307d9 4326 int args_offset = INTVAL (args_so_far);
4327 int skip;
4328
4329 /* Push padding now if padding above and stack grows down,
4330 or if padding below and stack grows up.
4331 But if space already allocated, this has already been done. */
4332 if (extra && args_addr == 0
4333 && where_pad != none && where_pad != stack_direction)
b572011e 4334 anti_adjust_stack (GEN_INT (extra));
10f307d9 4335
4336 /* If we make space by pushing it, we might as well push
4337 the real data. Otherwise, we can leave OFFSET nonzero
4338 and leave the space uninitialized. */
4339 if (args_addr == 0)
4340 offset = 0;
4341
4342 /* Now NOT_STACK gets the number of words that we don't need to
dc537795 4343 allocate on the stack. Convert OFFSET to words too. */
f054eb3c 4344 not_stack = (partial - offset) / UNITS_PER_WORD;
f0cf03cb 4345 offset /= UNITS_PER_WORD;
10f307d9 4346
4347 /* If the partial register-part of the arg counts in its stack size,
4348 skip the part of stack space corresponding to the registers.
4349 Otherwise, start copying to the beginning of the stack space,
4350 by setting SKIP to 0. */
997d68fe 4351 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
10f307d9 4352
ca316360 4353 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
10f307d9 4354 x = validize_mem (force_const_mem (mode, x));
4355
4356 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4357 SUBREGs of such registers are not allowed. */
8ad4c111 4358 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
10f307d9 4359 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4360 x = copy_to_reg (x);
4361
4362 /* Loop over all the words allocated on the stack for this arg. */
4363 /* We can do it by words, because any scalar bigger than a word
4364	 has a size that is a multiple of a word. */
10f307d9 4365 for (i = size - 1; i >= not_stack; i--)
10f307d9 4366 if (i >= not_stack + offset)
a95e5776 4367 if (!emit_push_insn (operand_subword_force (x, i, mode),
b572011e 4368 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4369 0, args_addr,
4370 GEN_INT (args_offset + ((i - not_stack + skip)
997d68fe 4371 * UNITS_PER_WORD)),
a95e5776 4372 reg_parm_stack_space, alignment_pad, sibcall_p))
4373 return false;
10f307d9 4374 }
4375 else
4376 {
4377 rtx addr;
f7c44134 4378 rtx dest;
10f307d9 4379
4380 /* Push padding now if padding above and stack grows down,
4381 or if padding below and stack grows up.
4382 But if space already allocated, this has already been done. */
4383 if (extra && args_addr == 0
4384 && where_pad != none && where_pad != stack_direction)
b572011e 4385 anti_adjust_stack (GEN_INT (extra));
10f307d9 4386
4387#ifdef PUSH_ROUNDING
4448f543 4388 if (args_addr == 0 && PUSH_ARGS)
ef7dc4b4 4389 emit_single_push_insn (mode, x, type);
10f307d9 4390 else
4391#endif
eb4b06b6 4392 {
971ba038 4393 if (CONST_INT_P (args_so_far))
eb4b06b6 4394 addr
4395 = memory_address (mode,
29c05e22 4396 plus_constant (Pmode, args_addr,
eb4b06b6 4397 INTVAL (args_so_far)));
fa56dc1d 4398 else
941522d6 4399 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4400 args_so_far));
ef7dc4b4 4401 dest = gen_rtx_MEM (mode, addr);
f7db692b 4402
4403 /* We do *not* set_mem_attributes here, because incoming arguments
4404 may overlap with sibling call outgoing arguments and we cannot
4405 allow reordering of reads from function arguments with stores
4406 to outgoing arguments of sibling calls. We do, however, want
4407 to record the alignment of the stack slot. */
4408 /* ALIGN may well be better aligned than TYPE, e.g. due to
4409 PARM_BOUNDARY. Assume the caller isn't lying. */
4410 set_mem_align (dest, align);
10f307d9 4411
ef7dc4b4 4412 emit_move_insn (dest, x);
ef7dc4b4 4413 }
10f307d9 4414 }
4415
a95e5776 4416 /* Move the partial arguments into the registers and any overlapping
4417 values that we moved into the pseudos in tmp_regs. */
a984cc1e 4418 if (partial > 0 && reg != 0)
ce739127 4419 {
4420 /* Handle calls that pass values in multiple non-contiguous locations.
4421 The Irix 6 ABI has examples of this. */
4422 if (GET_CODE (reg) == PARALLEL)
5f4cd670 4423 emit_group_load (reg, x, type, -1);
ce739127 4424 else
a95e5776 4425 {
f054eb3c 4426 gcc_assert (partial % UNITS_PER_WORD == 0);
a95e5776 4427 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4428
4429 for (int i = 0; i < overlapping; i++)
4430 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4431 + nregs - overlapping + i),
4432 tmp_regs[i]);
4433
f054eb3c 4434 }
ce739127 4435 }
10f307d9 4436
4437 if (extra && args_addr == 0 && where_pad == stack_direction)
b572011e 4438 anti_adjust_stack (GEN_INT (extra));
fa56dc1d 4439
364a85bd 4440 if (alignment_pad && args_addr == 0)
9d855d2f 4441 anti_adjust_stack (alignment_pad);
a95e5776 4442
4443 return true;
10f307d9 4444}
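
/* Usage sketch: push a single SImode value with default alignment, no
   partial-register handling, and no preallocated argument block
   (hypothetical, compiled out; assumes the PUSH_ARGS path).  */
#if 0
static bool
example_push_word (rtx x)
{
  return emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
			 GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
			 NULL_RTX, GEN_INT (0), 0, NULL_RTX, false);
}
#endif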
4445\f
d8e5b213 4446/* Return X if X can be used as a subtarget in a sequence of arithmetic
4447 operations. */
4448
4449static rtx
35cb5232 4450get_subtarget (rtx x)
d8e5b213 4451{
a1ad7483 4452 return (optimize
4453 || x == 0
d8e5b213 4454 /* Only registers can be subtargets. */
8ad4c111 4455 || !REG_P (x)
d8e5b213 4456 /* Don't use hard regs to avoid extending their life. */
4457 || REGNO (x) < FIRST_PSEUDO_REGISTER
d8e5b213 4458 ? 0 : x);
4459}
4460
79367e65 4461/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4462 FIELD is a bitfield. Returns true if the optimization was successful,
4463 and there's nothing else to do. */
4464
4465static bool
4466optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4467 unsigned HOST_WIDE_INT bitpos,
4bb60ec7 4468 unsigned HOST_WIDE_INT bitregion_start,
4469 unsigned HOST_WIDE_INT bitregion_end,
3754d046 4470 machine_mode mode1, rtx str_rtx,
79367e65 4471 tree to, tree src)
4472{
3754d046 4473 machine_mode str_mode = GET_MODE (str_rtx);
79367e65 4474 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4475 tree op0, op1;
4476 rtx value, result;
4477 optab binop;
c4532f22 4478 gimple srcstmt;
4479 enum tree_code code;
79367e65 4480
4481 if (mode1 != VOIDmode
4482 || bitsize >= BITS_PER_WORD
4483 || str_bitsize > BITS_PER_WORD
4484 || TREE_SIDE_EFFECTS (to)
4485 || TREE_THIS_VOLATILE (to))
4486 return false;
4487
4488 STRIP_NOPS (src);
c4532f22 4489 if (TREE_CODE (src) != SSA_NAME)
4490 return false;
4491 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4492 return false;
4493
4494 srcstmt = get_gimple_for_ssa_name (src);
4495 if (!srcstmt
4496 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
79367e65 4497 return false;
4498
c4532f22 4499 code = gimple_assign_rhs_code (srcstmt);
4500
4501 op0 = gimple_assign_rhs1 (srcstmt);
4502
4503 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4504 to find its initialization. Hopefully the initialization will
4505 be from a bitfield load. */
4506 if (TREE_CODE (op0) == SSA_NAME)
4507 {
4508 gimple op0stmt = get_gimple_for_ssa_name (op0);
4509
4510 /* We want to eventually have OP0 be the same as TO, which
4511 should be a bitfield. */
4512 if (!op0stmt
4513 || !is_gimple_assign (op0stmt)
4514 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4515 return false;
4516 op0 = gimple_assign_rhs1 (op0stmt);
4517 }
4518
4519 op1 = gimple_assign_rhs2 (srcstmt);
79367e65 4520
4521 if (!operand_equal_p (to, op0, 0))
4522 return false;
4523
4524 if (MEM_P (str_rtx))
4525 {
4526 unsigned HOST_WIDE_INT offset1;
4527
4528 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4529 str_mode = word_mode;
4530 str_mode = get_best_mode (bitsize, bitpos,
4bb60ec7 4531 bitregion_start, bitregion_end,
79367e65 4532 MEM_ALIGN (str_rtx), str_mode, 0);
4533 if (str_mode == VOIDmode)
4534 return false;
4535 str_bitsize = GET_MODE_BITSIZE (str_mode);
4536
4537 offset1 = bitpos;
4538 bitpos %= str_bitsize;
4539 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4540 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4541 }
4542 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4543 return false;
4544
4545 /* If the bit field covers the whole REG/MEM, store_field
4546 will likely generate better code. */
4547 if (bitsize >= str_bitsize)
4548 return false;
4549
4550 /* We can't handle fields split across multiple entities. */
4551 if (bitpos + bitsize > str_bitsize)
4552 return false;
4553
4554 if (BYTES_BIG_ENDIAN)
4555 bitpos = str_bitsize - bitpos - bitsize;
4556
c4532f22 4557 switch (code)
79367e65 4558 {
4559 case PLUS_EXPR:
4560 case MINUS_EXPR:
4561 /* For now, just optimize the case of the topmost bitfield
4562 where we don't need to do any masking and also
4563 1 bit bitfields where xor can be used.
4564 We might win by one instruction for the other bitfields
4565 too if insv/extv instructions aren't used, so that
4566 can be added later. */
4567 if (bitpos + bitsize != str_bitsize
4568 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4569 break;
4570
1db6d067 4571 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
79367e65 4572 value = convert_modes (str_mode,
4573 TYPE_MODE (TREE_TYPE (op1)), value,
4574 TYPE_UNSIGNED (TREE_TYPE (op1)));
4575
4576 /* We may be accessing data outside the field, which means
4577 we can alias adjacent data. */
4578 if (MEM_P (str_rtx))
4579 {
4580 str_rtx = shallow_copy_rtx (str_rtx);
4581 set_mem_alias_set (str_rtx, 0);
4582 set_mem_expr (str_rtx, 0);
4583 }
4584
c4532f22 4585 binop = code == PLUS_EXPR ? add_optab : sub_optab;
79367e65 4586 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4587 {
4588 value = expand_and (str_mode, value, const1_rtx, NULL);
4589 binop = xor_optab;
4590 }
40715742 4591 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
79367e65 4592 result = expand_binop (str_mode, binop, str_rtx,
4593 value, str_rtx, 1, OPTAB_WIDEN);
4594 if (result != str_rtx)
4595 emit_move_insn (str_rtx, result);
4596 return true;
4597
0ebe5db7 4598 case BIT_IOR_EXPR:
4599 case BIT_XOR_EXPR:
4600 if (TREE_CODE (op1) != INTEGER_CST)
4601 break;
40715742 4602 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4603 value = convert_modes (str_mode,
0ebe5db7 4604 TYPE_MODE (TREE_TYPE (op1)), value,
4605 TYPE_UNSIGNED (TREE_TYPE (op1)));
4606
4607 /* We may be accessing data outside the field, which means
4608 we can alias adjacent data. */
4609 if (MEM_P (str_rtx))
4610 {
4611 str_rtx = shallow_copy_rtx (str_rtx);
4612 set_mem_alias_set (str_rtx, 0);
4613 set_mem_expr (str_rtx, 0);
4614 }
4615
c4532f22 4616 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
40715742 4617 if (bitpos + bitsize != str_bitsize)
0ebe5db7 4618 {
0359f9f5 4619 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4620 str_mode);
40715742 4621 value = expand_and (str_mode, value, mask, NULL_RTX);
0ebe5db7 4622 }
40715742 4623 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4624 result = expand_binop (str_mode, binop, str_rtx,
0ebe5db7 4625 value, str_rtx, 1, OPTAB_WIDEN);
4626 if (result != str_rtx)
4627 emit_move_insn (str_rtx, result);
4628 return true;
4629
79367e65 4630 default:
4631 break;
4632 }
4633
4634 return false;
4635}
4636
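/* A source-level sketch of the pattern targeted above (hypothetical,
   compiled out): the single-bit update below can become one XOR of a
   constant mask into the word containing BF, with no
   extract/shift/insert sequence.  */
#if 0
struct example_s { unsigned int bf : 1; unsigned int rest : 31; };

static void
example_flip (struct example_s *p)
{
  p->bf ^= 1;
}
#endif
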
4bb60ec7 4637/* In the C++ memory model, consecutive bit fields in a structure are
4638 considered one memory location.
4639
3cef948a 4640 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
8d8a34f9 4641 returns the bit range of consecutive bits in which this COMPONENT_REF
3cef948a 4642 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4643 and *OFFSET may be adjusted in the process.
4644
4645 If the access does not need to be restricted, 0 is returned in both
8d8a34f9 4646 *BITSTART and *BITEND. */
4bb60ec7 4647
4648static void
4649get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4650 unsigned HOST_WIDE_INT *bitend,
8d8a34f9 4651 tree exp,
3cef948a 4652 HOST_WIDE_INT *bitpos,
4653 tree *offset)
4bb60ec7 4654{
3cef948a 4655 HOST_WIDE_INT bitoffset;
fa42e1a4 4656 tree field, repr;
4bb60ec7 4657
4658 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4659
8d8a34f9 4660 field = TREE_OPERAND (exp, 1);
4661 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4662 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4663 need to limit the range we can access. */
4664 if (!repr)
4bb60ec7 4665 {
4666 *bitstart = *bitend = 0;
4667 return;
4668 }
4669
73041e9b 4670 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4671 part of a larger bit field, then the representative does not serve any
4672 useful purpose. This can occur in Ada. */
4673 if (handled_component_p (TREE_OPERAND (exp, 0)))
4674 {
3754d046 4675 machine_mode rmode;
73041e9b 4676 HOST_WIDE_INT rbitsize, rbitpos;
4677 tree roffset;
4678 int unsignedp;
4679 int volatilep = 0;
4680 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4681 &roffset, &rmode, &unsignedp, &volatilep, false);
4682 if ((rbitpos % BITS_PER_UNIT) != 0)
4683 {
4684 *bitstart = *bitend = 0;
4685 return;
4686 }
4687 }
4688
8d8a34f9 4689 /* Compute the adjustment to bitpos from the offset of the field
fa42e1a4 4690 relative to the representative. DECL_FIELD_OFFSET of field and
4691 repr are the same by construction if they are not constants,
4692 see finish_bitfield_layout. */
e913b5cd 4693 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4694 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4695 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4696 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
fa42e1a4 4697 else
4698 bitoffset = 0;
e913b5cd 4699 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4700 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4bb60ec7 4701
3cef948a 4702 /* If the adjustment is larger than bitpos, we would have a negative bit
5efffd8e 4703 position for the lower bound and this may wreak havoc later. Adjust
4704 offset and bitpos to make the lower bound non-negative in that case. */
3cef948a 4705 if (bitoffset > *bitpos)
4706 {
4707 HOST_WIDE_INT adjust = bitoffset - *bitpos;
3cef948a 4708 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
3cef948a 4709
4710 *bitpos += adjust;
5efffd8e 4711 if (*offset == NULL_TREE)
4712 *offset = size_int (-adjust / BITS_PER_UNIT);
4713 else
4714 *offset
4715 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
3cef948a 4716 *bitstart = 0;
4717 }
4718 else
4719 *bitstart = *bitpos - bitoffset;
4720
e913b5cd 4721 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4bb60ec7 4722}
79367e65 4723
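/* A sketch (hypothetical, compiled out): in the structure below, B and
   C share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to either may
   touch bits of the other but must not touch A or D; the range computed
   above is the bit extent of that representative.  */
#if 0
struct example_t
{
  char a;	/* separate memory location */
  int b : 5;	/* same location as c */
  int c : 11;	/* same location as b */
  char d;	/* separate memory location */
};
#endif
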
6d1013f7 4724/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4725 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4726 DECL_RTL was not set yet, return NORTL. */
4727
4728static inline bool
4729addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4730{
4731 if (TREE_CODE (addr) != ADDR_EXPR)
4732 return false;
4733
4734 tree base = TREE_OPERAND (addr, 0);
4735
4736 if (!DECL_P (base)
4737 || TREE_ADDRESSABLE (base)
4738 || DECL_MODE (base) == BLKmode)
4739 return false;
4740
4741 if (!DECL_RTL_SET_P (base))
4742 return nortl;
4743
4744 return (!MEM_P (DECL_RTL (base)));
4745}
4746
a598af2a 4747/* Returns true if the MEM_REF REF refers to an object that does not
4748 reside in memory and has non-BLKmode. */
4749
6d1013f7 4750static inline bool
a598af2a 4751mem_ref_refers_to_non_mem_p (tree ref)
4752{
4753 tree base = TREE_OPERAND (ref, 0);
6d1013f7 4754 return addr_expr_of_non_mem_decl_p_1 (base, false);
4755}
4756
5b5037b3 4757/* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4758 is true, try generating a nontemporal store. */
10f307d9 4759
5d3c0894 4760void
5b5037b3 4761expand_assignment (tree to, tree from, bool nontemporal)
10f307d9 4762{
19cb6b50 4763 rtx to_rtx = 0;
10f307d9 4764 rtx result;
3754d046 4765 machine_mode mode;
56cf6489 4766 unsigned int align;
8786db1e 4767 enum insn_code icode;
10f307d9 4768
4769 /* Don't crash if the lhs of the assignment was erroneous. */
10f307d9 4770 if (TREE_CODE (to) == ERROR_MARK)
9282409c 4771 {
1084097d 4772 expand_normal (from);
5d3c0894 4773 return;
9282409c 4774 }
10f307d9 4775
8f3e551a 4776 /* Optimize away no-op moves without side-effects. */
4777 if (operand_equal_p (to, from, 0))
4778 return;
4779
a598af2a 4780 /* Handle misaligned stores. */
5d9de213 4781 mode = TYPE_MODE (TREE_TYPE (to));
4782 if ((TREE_CODE (to) == MEM_REF
4783 || TREE_CODE (to) == TARGET_MEM_REF)
4784 && mode != BLKmode
55e42d78 4785 && !mem_ref_refers_to_non_mem_p (to)
3482bf13 4786 && ((align = get_object_alignment (to))
56cf6489 4787 < GET_MODE_ALIGNMENT (mode))
55e42d78 4788 && (((icode = optab_handler (movmisalign_optab, mode))
4789 != CODE_FOR_nothing)
4790 || SLOW_UNALIGNED_ACCESS (mode, align)))
5d9de213 4791 {
884b03c9 4792 rtx reg, mem;
5d9de213 4793
4794 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4795 reg = force_not_mem (reg);
884b03c9 4796 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5d9de213 4797
55e42d78 4798 if (icode != CODE_FOR_nothing)
4799 {
884b03c9 4800 struct expand_operand ops[2];
4801
55e42d78 4802 create_fixed_operand (&ops[0], mem);
4803 create_input_operand (&ops[1], reg, mode);
4804 /* The movmisalign<mode> pattern cannot fail, else the assignment
4805 would silently be omitted. */
4806 expand_insn (icode, 2, ops);
4807 }
4808 else
1603adf9 4809 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
5d9de213 4810 return;
4811 }
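  /* A typical source-level trigger for the misaligned path above (a
     target-dependent sketch):

       typedef int unaligned_int __attribute__ ((aligned (1)));
       void f (unaligned_int *p) { *p = 42; }

     On strict-alignment targets *P is only byte-aligned, so the store
     is expanded via movmisalign<mode> or, failing that, via
     store_bit_field.  */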
4812
10f307d9 4813 /* Assignment of a structure component needs special treatment
4814 if the structure component's rtx is not simply a MEM.
e3a8913c 4815 Assignment of an array element at a constant index, and assignment of
4816 an array element in an unaligned packed structure field, has the same
a598af2a 4817 problem. Same for (partially) storing into a non-memory object. */
79367e65 4818 if (handled_component_p (to)
182cf5a9 4819 || (TREE_CODE (to) == MEM_REF
a598af2a 4820 && mem_ref_refers_to_non_mem_p (to))
2d55cbd9 4821 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
10f307d9 4822 {
3754d046 4823 machine_mode mode1;
02e7a332 4824 HOST_WIDE_INT bitsize, bitpos;
4bb60ec7 4825 unsigned HOST_WIDE_INT bitregion_start = 0;
4826 unsigned HOST_WIDE_INT bitregion_end = 0;
954bdcb1 4827 tree offset;
10f307d9 4828 int unsignedp;
4829 int volatilep = 0;
88ac3f7f 4830 tree tem;
4831
4832 push_temp_slots ();
7fce34be 4833 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
e7e9416e 4834 &unsignedp, &volatilep, true);
10f307d9 4835
5efffd8e 4836 /* Make sure bitpos is not negative, it can wreak havoc later. */
4837 if (bitpos < 0)
4838 {
4839 gcc_assert (offset == NULL_TREE);
4840 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4841 ? 3 : exact_log2 (BITS_PER_UNIT)));
4842 bitpos &= BITS_PER_UNIT - 1;
4843 }
4844
4bb60ec7 4845 if (TREE_CODE (to) == COMPONENT_REF
4846 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
3cef948a 4847 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
3070a799 4848 /* The C++ memory model naturally applies to byte-aligned fields.
4849 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4850 BITSIZE are not byte-aligned, there is no need to limit the range
4851 we can access. This can occur with packed structures in Ada. */
4852 else if (bitsize > 0
4853 && bitsize % BITS_PER_UNIT == 0
4854 && bitpos % BITS_PER_UNIT == 0)
4855 {
4856 bitregion_start = bitpos;
4857 bitregion_end = bitpos + bitsize - 1;
4858 }
4bb60ec7 4859
7b9e6cc3 4860 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
a689a61a 4861
61a1f9de 4862 /* If the field has a mode, we want to access it in the
1795103a 4863 field's mode, not the computed mode.
4864 If a MEM has VOIDmode (external with incomplete type),
4865 use BLKmode for it instead. */
4866 if (MEM_P (to_rtx))
4867 {
61a1f9de 4868 if (mode1 != VOIDmode)
1795103a 4869 to_rtx = adjust_address (to_rtx, mode1, 0);
4870 else if (GET_MODE (to_rtx) == VOIDmode)
4871 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4872 }
a420d927 4873
954bdcb1 4874 if (offset != 0)
4875 {
3754d046 4876 machine_mode address_mode;
c22de3f0 4877 rtx offset_rtx;
954bdcb1 4878
c22de3f0 4879 if (!MEM_P (to_rtx))
4880 {
4881 /* We can get constant negative offsets into arrays with broken
4882 user code. Translate this to a trap instead of ICEing. */
4883 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4884 expand_builtin_trap ();
4885 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4886 }
33ef2f52 4887
c22de3f0 4888 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
87cf5753 4889 address_mode = get_address_mode (to_rtx);
98155838 4890 if (GET_MODE (offset_rtx) != address_mode)
2ff88218 4891 {
4892 /* We cannot be sure that the RTL in offset_rtx is valid outside
4893 of a memory address context, so force it into a register
4894 before attempting to convert it to the desired mode. */
4895 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4896 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4897 }
33ef2f52 4898
dbe2840a 4899 /* If we have an expression in OFFSET_RTX and a non-zero
4900 byte offset in BITPOS, adding the byte offset before the
4901 OFFSET_RTX results in better intermediate code, which makes
4902 later rtl optimization passes perform better.
4903
4904 We prefer intermediate code like this:
4905
4906 r124:DI=r123:DI+0x18
4907 [r124:DI]=r121:DI
4908
4909 ... instead of ...
4910
4911 r124:DI=r123:DI+0x10
4912 [r124:DI+0x8]=r121:DI
4913
4914 This is only done for aligned data values, as these can
4915 be expected to result in single move instructions. */
4916 if (mode1 != VOIDmode
4917 && bitpos != 0
2b96c5f6 4918 && bitsize > 0
fa56dc1d 4919 && (bitpos % bitsize) == 0
25d55d72 4920 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
dbe2840a 4921 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
25d55d72 4922 {
fac6aae6 4923 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
438167eb 4924 bitregion_start = 0;
4925 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4926 bitregion_end -= bitpos;
25d55d72 4927 bitpos = 0;
4928 }
4929
fcdc122e 4930 to_rtx = offset_address (to_rtx, offset_rtx,
252d0e4d 4931 highest_pow2_factor_for_target (to,
4932 offset));
954bdcb1 4933 }
7014838c 4934
d8d9af50 4935 /* No action is needed if the target is not a memory and the field
4936 lies completely outside that target. This can occur if the source
4937 code contains an out-of-bounds access to a small array. */
4938 if (!MEM_P (to_rtx)
4939 && GET_MODE (to_rtx) != BLKmode
4940 && (unsigned HOST_WIDE_INT) bitpos
995b44f5 4941 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
d8d9af50 4942 {
4943 expand_normal (from);
4944 result = NULL;
4945 }
79367e65 4946 /* Handle expand_expr of a complex value returning a CONCAT. */
d8d9af50 4947 else if (GET_CODE (to_rtx) == CONCAT)
2b96c5f6 4948 {
3a175160 4949 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4950 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4951 && bitpos == 0
4952 && bitsize == mode_bitsize)
4953 result = store_expr (from, to_rtx, false, nontemporal);
4954 else if (bitsize == mode_bitsize / 2
4955 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4956 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4957 nontemporal);
2cd0cb08 4958 else if (bitpos + bitsize <= mode_bitsize / 2)
3a175160 4959 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4bb60ec7 4960 bitregion_start, bitregion_end,
f955ca51 4961 mode1, from,
3a175160 4962 get_alias_set (to), nontemporal);
2cd0cb08 4963 else if (bitpos >= mode_bitsize / 2)
3a175160 4964 result = store_field (XEXP (to_rtx, 1), bitsize,
4bb60ec7 4965 bitpos - mode_bitsize / 2,
4966 bitregion_start, bitregion_end,
4967 mode1, from,
f955ca51 4968 get_alias_set (to), nontemporal);
3a175160 4969 else if (bitpos == 0 && bitsize == mode_bitsize)
020823de 4970 {
3a175160 4971 rtx from_rtx;
4972 result = expand_normal (from);
4973 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4974 TYPE_MODE (TREE_TYPE (from)), 0);
4975 emit_move_insn (XEXP (to_rtx, 0),
4976 read_complex_part (from_rtx, false));
4977 emit_move_insn (XEXP (to_rtx, 1),
4978 read_complex_part (from_rtx, true));
020823de 4979 }
4980 else
4981 {
3a175160 4982 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
2cd0cb08 4983 GET_MODE_SIZE (GET_MODE (to_rtx)));
3a175160 4984 write_complex_part (temp, XEXP (to_rtx, 0), false);
4985 write_complex_part (temp, XEXP (to_rtx, 1), true);
4bb60ec7 4986 result = store_field (temp, bitsize, bitpos,
4987 bitregion_start, bitregion_end,
4988 mode1, from,
f955ca51 4989 get_alias_set (to), nontemporal);
3a175160 4990 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4991 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
020823de 4992 }
10f307d9 4993 }
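      /* (Sketch: a CONCAT target arises when a complex value lives in
         two pseudos instead of memory; GNU C such as

           __complex__ double z;
           __real__ z = 1.0;

         stores into just one half, the XEXP (to_rtx, 0/1) cases
         above.)  */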
79367e65 4994 else
0717ec39 4995 {
79367e65 4996 if (MEM_P (to_rtx))
d4ca42d3 4997 {
79367e65 4998 /* If the field is at offset zero, we could have been given the
4999 DECL_RTX of the parent struct. Don't munge it. */
5000 to_rtx = shallow_copy_rtx (to_rtx);
79367e65 5001 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
79367e65 5002 if (volatilep)
5003 MEM_VOLATILE_P (to_rtx) = 1;
0717ec39 5004 }
9c5f26b0 5005
4bb60ec7 5006 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5007 bitregion_start, bitregion_end,
5008 mode1,
79367e65 5009 to_rtx, to, from))
5010 result = NULL;
5011 else
4bb60ec7 5012 result = store_field (to_rtx, bitsize, bitpos,
5013 bitregion_start, bitregion_end,
5014 mode1, from,
f955ca51 5015 get_alias_set (to), nontemporal);
0717ec39 5016 }
5017
79367e65 5018 if (result)
5019 preserve_temp_slots (result);
2b96c5f6 5020 pop_temp_slots ();
5d3c0894 5021 return;
10f307d9 5022 }
5023
a2e044a5 5024 /* If the rhs is a function call and its value is not an aggregate,
5025 call the function before we start to compute the lhs.
5026 This is needed for correct code for cases such as
5027 val = setjmp (buf) on machines where reference to val
e767499e 5028 requires loading up part of an address in a separate insn.
5029
16a8193d 5030 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
 5031    since it might be a promoted variable where the zero- or sign-extension
5032 needs to be done. Handling this in the normal way is safe because no
a8dd994c 5033 computation is done before the call. The same is true for SSA names. */
45550790 5034 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35c122b 5035 && COMPLETE_TYPE_P (TREE_TYPE (from))
61b44857 5036 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
ee5ab2d1 5037 && ! (((TREE_CODE (to) == VAR_DECL
5038 || TREE_CODE (to) == PARM_DECL
5039 || TREE_CODE (to) == RESULT_DECL)
a8dd994c 5040 && REG_P (DECL_RTL (to)))
5041 || TREE_CODE (to) == SSA_NAME))
a2e044a5 5042 {
88ac3f7f 5043 rtx value;
058a1b7a 5044 rtx bounds;
88ac3f7f 5045
5046 push_temp_slots ();
8ec3c5c2 5047 value = expand_normal (from);
058a1b7a 5048
5049 /* Split value and bounds to store them separately. */
5050 chkp_split_slot (value, &value, &bounds);
5051
a2e044a5 5052 if (to_rtx == 0)
8a06f2d4 5053 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
ac263f88 5054
ce739127 5055 /* Handle calls that return values in multiple non-contiguous locations.
5056 The Irix 6 ABI has examples of this. */
5057 if (GET_CODE (to_rtx) == PARALLEL)
2d0fd66d 5058 {
5059 if (GET_CODE (value) == PARALLEL)
5060 emit_group_move (to_rtx, value);
5061 else
5062 emit_group_load (to_rtx, value, TREE_TYPE (from),
5063 int_size_in_bytes (TREE_TYPE (from)));
5064 }
5065 else if (GET_CODE (value) == PARALLEL)
5066 emit_group_store (to_rtx, value, TREE_TYPE (from),
5067 int_size_in_bytes (TREE_TYPE (from)));
ce739127 5068 else if (GET_MODE (to_rtx) == BLKmode)
7e91b548 5069 {
f955ca51 5070 /* Handle calls that return BLKmode values in registers. */
7e91b548 5071 if (REG_P (value))
5072 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5073 else
5074 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5075 }
ac263f88 5076 else
5471b3be 5077 {
85d654dd 5078 if (POINTER_TYPE_P (TREE_TYPE (to)))
98155838 5079 value = convert_memory_address_addr_space
5080 (GET_MODE (to_rtx), value,
5081 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5082
5471b3be 5083 emit_move_insn (to_rtx, value);
5084 }
058a1b7a 5085
5086 /* Store bounds if required. */
5087 if (bounds
5088 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5089 {
5090 gcc_assert (MEM_P (to_rtx));
5091 chkp_emit_bounds_store (bounds, value, to_rtx);
5092 }
5093
a2e044a5 5094 preserve_temp_slots (to_rtx);
88ac3f7f 5095 pop_temp_slots ();
5d3c0894 5096 return;
a2e044a5 5097 }
5098
a598af2a 5099 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5100 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
10f307d9 5101
addbe7ac 5102 /* Don't move directly into a return register. */
155b05dc 5103 if (TREE_CODE (to) == RESULT_DECL
8ad4c111 5104 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
addbe7ac 5105 {
88ac3f7f 5106 rtx temp;
5107
5108 push_temp_slots ();
f66cd30d 5109
5110 /* If the source is itself a return value, it still is in a pseudo at
5111 this point so we can move it back to the return register directly. */
5112 if (REG_P (to_rtx)
5113 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5114 && TREE_CODE (from) != CALL_EXPR)
ee5ab2d1 5115 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5116 else
5117 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
155b05dc 5118
2d0fd66d 5119 /* Handle calls that return values in multiple non-contiguous locations.
5120 The Irix 6 ABI has examples of this. */
155b05dc 5121 if (GET_CODE (to_rtx) == PARALLEL)
2d0fd66d 5122 {
5123 if (GET_CODE (temp) == PARALLEL)
5124 emit_group_move (to_rtx, temp);
5125 else
5126 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5127 int_size_in_bytes (TREE_TYPE (from)));
5128 }
ee5ab2d1 5129 else if (temp)
155b05dc 5130 emit_move_insn (to_rtx, temp);
5131
addbe7ac 5132 preserve_temp_slots (to_rtx);
88ac3f7f 5133 pop_temp_slots ();
5d3c0894 5134 return;
addbe7ac 5135 }
5136
10f307d9 5137 /* In case we are returning the contents of an object which overlaps
5138 the place the value is being stored, use a safe function when copying
5139 a value through a pointer into a structure value return block. */
865c8a7e 5140 if (TREE_CODE (to) == RESULT_DECL
5141 && TREE_CODE (from) == INDIRECT_REF
bd1a81f7 5142 && ADDR_SPACE_GENERIC_P
865c8a7e 5143 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5144 && refs_may_alias_p (to, from)
18d50ae6 5145 && cfun->returns_struct
5146 && !cfun->returns_pcc_struct)
10f307d9 5147 {
88ac3f7f 5148 rtx from_rtx, size;
5149
5150 push_temp_slots ();
eaf7767e 5151 size = expr_size (from);
8ec3c5c2 5152 from_rtx = expand_normal (from);
10f307d9 5153
f896c932 5154 emit_library_call (memmove_libfunc, LCT_NORMAL,
5155 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5156 XEXP (from_rtx, 0), Pmode,
5157 convert_to_mode (TYPE_MODE (sizetype),
5158 size, TYPE_UNSIGNED (sizetype)),
5159 TYPE_MODE (sizetype));
10f307d9 5160
5161 preserve_temp_slots (to_rtx);
88ac3f7f 5162 pop_temp_slots ();
5d3c0894 5163 return;
10f307d9 5164 }
5165
5166 /* Compute FROM and store the value in the rtx we got. */
5167
88ac3f7f 5168 push_temp_slots ();
058a1b7a 5169 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
10f307d9 5170 preserve_temp_slots (result);
88ac3f7f 5171 pop_temp_slots ();
5d3c0894 5172 return;
10f307d9 5173}
5174
5b5037b3 5175/* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5176 succeeded, false otherwise. */
5177
16c9337c 5178bool
5b5037b3 5179emit_storent_insn (rtx to, rtx from)
5180{
8786db1e 5181 struct expand_operand ops[2];
3754d046 5182 machine_mode mode = GET_MODE (to);
d6bf3b14 5183 enum insn_code code = optab_handler (storent_optab, mode);
5b5037b3 5184
5185 if (code == CODE_FOR_nothing)
5186 return false;
5187
8786db1e 5188 create_fixed_operand (&ops[0], to);
5189 create_input_operand (&ops[1], from, mode);
5190 return maybe_expand_insn (code, 2, ops);
5b5037b3 5191}
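/* (Usage sketch: store_expr tries this when the tree carries a
   nontemporal hint; on targets without a storent<mode> pattern the
   optab lookup yields CODE_FOR_nothing and a normal store is emitted
   instead.)  */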
5192
10f307d9 5193/* Generate code for computing expression EXP,
5194 and storing the value into TARGET.
10f307d9 5195
9282409c 5196 If the mode is BLKmode then we may return TARGET itself.
 5197    It turns out that in BLKmode it doesn't cause a problem,
5198 because C has no operators that could combine two different
5199 assignments into the same BLKmode object with different values
5200 with no sequence point. Will other languages need this to
5201 be more thorough?
5202
c0f85e83 5203 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5b5037b3 5204 stack, and block moves may need to be treated specially.
48e1416a 5205
058a1b7a 5206 If NONTEMPORAL is true, try using a nontemporal store instruction.
5207
5208 If BTARGET is not NULL then computed bounds of EXP are
5209 associated with BTARGET. */
10f307d9 5210
5211rtx
058a1b7a 5212store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5213 bool nontemporal, tree btarget)
10f307d9 5214{
19cb6b50 5215 rtx temp;
60ffaf4d 5216 rtx alt_rtl = NULL_RTX;
ed4d69dc 5217 location_t loc = curr_insn_location ();
10f307d9 5218
824638f9 5219 if (VOID_TYPE_P (TREE_TYPE (exp)))
5220 {
5221 /* C++ can generate ?: expressions with a throw expression in one
5222 branch and an rvalue in the other. Here, we resolve attempts to
917bbcab 5223 store the throw expression's nonexistent result. */
c0f85e83 5224 gcc_assert (!call_param_p);
1db6d067 5225 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
824638f9 5226 return NULL_RTX;
5227 }
10f307d9 5228 if (TREE_CODE (exp) == COMPOUND_EXPR)
5229 {
5230 /* Perform first part of compound expression, then assign from second
5231 part. */
a35a63ff 5232 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
c0f85e83 5233 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
058a1b7a 5234 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5235 call_param_p, nontemporal, btarget);
10f307d9 5236 }
5237 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5238 {
5239 /* For conditional expression, get safe form of the target. Then
5240 test the condition, doing the appropriate assignment on either
5241 side. This avoids the creation of unnecessary temporaries.
5242 For non-BLKmode, it is more efficient not to do this. */
5243
1d277a67 5244 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
10f307d9 5245
d07f1b1f 5246 do_pending_stack_adjust ();
10f307d9 5247 NO_DEFER_POP;
79ab74cc 5248 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
058a1b7a 5249 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5250 nontemporal, btarget);
1d5ad681 5251 emit_jump_insn (targetm.gen_jump (lab2));
10f307d9 5252 emit_barrier ();
5253 emit_label (lab1);
058a1b7a 5254 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5255 nontemporal, btarget);
10f307d9 5256 emit_label (lab2);
5257 OK_DEFER_POP;
9012f57d 5258
3f2a8027 5259 return NULL_RTX;
bb11bacb 5260 }
acfb31e5 5261 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
edc2a478 5262 /* If this is a scalar in a register that is stored in a wider mode
acfb31e5 5263 than the declared mode, compute the result into its declared mode
5264 and then convert to the wider mode. Our value is the computed
5265 expression. */
5266 {
d2422fc2 5267 rtx inner_target = 0;
5268
3f2a8027 5269 /* We can do the conversion inside EXP, which will often result
5270 in some optimizations. Do the conversion in two steps: first
5271 change the signedness, if needed, then the extend. But don't
5272 do this if the type of EXP is a subtype of something else
5273 since then the conversion might involve more than just
5274 converting modes. */
5275 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
aec30911 5276 && TREE_TYPE (TREE_TYPE (exp)) == 0
dcfc697f 5277 && GET_MODE_PRECISION (GET_MODE (target))
5278 == TYPE_PRECISION (TREE_TYPE (exp)))
8d426db9 5279 {
e8629f9e 5280 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5281 TYPE_UNSIGNED (TREE_TYPE (exp))))
a4521f7e 5282 {
5283 /* Some types, e.g. Fortran's logical*4, won't have a signed
5284 version, so use the mode instead. */
5285 tree ntype
11773141 5286 = (signed_or_unsigned_type_for
e8629f9e 5287 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
a4521f7e 5288 if (ntype == NULL)
5289 ntype = lang_hooks.types.type_for_mode
5290 (TYPE_MODE (TREE_TYPE (exp)),
e8629f9e 5291 SUBREG_PROMOTED_SIGN (target));
a4521f7e 5292
389dd41b 5293 exp = fold_convert_loc (loc, ntype, exp);
a4521f7e 5294 }
8d426db9 5295
389dd41b 5296 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5297 (GET_MODE (SUBREG_REG (target)),
e8629f9e 5298 SUBREG_PROMOTED_SIGN (target)),
389dd41b 5299 exp);
d2422fc2 5300
5301 inner_target = SUBREG_REG (target);
8d426db9 5302 }
fa56dc1d 5303
a35a63ff 5304 temp = expand_expr (exp, inner_target, VOIDmode,
c0f85e83 5305 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
ceefa980 5306
058a1b7a 5307 /* Handle bounds returned by call. */
5308 if (TREE_CODE (exp) == CALL_EXPR)
5309 {
5310 rtx bounds;
5311 chkp_split_slot (temp, &temp, &bounds);
5312 if (bounds && btarget)
5313 {
5314 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5315 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5316 chkp_set_rtl_bounds (btarget, tmp);
5317 }
5318 }
5319
ceefa980 5320 /* If TEMP is a VOIDmode constant, use convert_modes to make
5321 sure that we properly convert it. */
5322 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
c3ba908e 5323 {
5324 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
e8629f9e 5325 temp, SUBREG_PROMOTED_SIGN (target));
c3ba908e 5326 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5327 GET_MODE (target), temp,
e8629f9e 5328 SUBREG_PROMOTED_SIGN (target));
c3ba908e 5329 }
ceefa980 5330
acfb31e5 5331 convert_move (SUBREG_REG (target), temp,
e8629f9e 5332 SUBREG_PROMOTED_SIGN (target));
28ad8d33 5333
3f2a8027 5334 return NULL_RTX;
acfb31e5 5335 }
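  /* (Sketch: the promoted-SUBREG case above covers targets that keep
     subword scalars in word-mode registers -- e.g. a 'short' local held
     in a word-mode pseudo; the value is computed in the declared mode
     and then sign- or zero-extended into the wider register.)  */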
b412eb5b 5336 else if ((TREE_CODE (exp) == STRING_CST
5337 || (TREE_CODE (exp) == MEM_REF
5338 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5339 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5340 == STRING_CST
5341 && integer_zerop (TREE_OPERAND (exp, 1))))
09879952 5342 && !nontemporal && !call_param_p
b412eb5b 5343 && MEM_P (target))
09879952 5344 {
5345 /* Optimize initialization of an array with a STRING_CST. */
5346 HOST_WIDE_INT exp_len, str_copy_len;
5347 rtx dest_mem;
b412eb5b 5348 tree str = TREE_CODE (exp) == STRING_CST
5349 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
09879952 5350
5351 exp_len = int_expr_size (exp);
5352 if (exp_len <= 0)
5353 goto normal_expr;
5354
b412eb5b 5355 if (TREE_STRING_LENGTH (str) <= 0)
182cf5a9 5356 goto normal_expr;
5357
5358 str_copy_len = strlen (TREE_STRING_POINTER (str));
5359 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5360 goto normal_expr;
5361
5362 str_copy_len = TREE_STRING_LENGTH (str);
b412eb5b 5363 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5364 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
182cf5a9 5365 {
5366 str_copy_len += STORE_MAX_PIECES - 1;
5367 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5368 }
5369 str_copy_len = MIN (str_copy_len, exp_len);
5370 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
b412eb5b 5371 CONST_CAST (char *, TREE_STRING_POINTER (str)),
182cf5a9 5372 MEM_ALIGN (target), false))
5373 goto normal_expr;
5374
5375 dest_mem = target;
5376
5377 dest_mem = store_by_pieces (dest_mem,
5378 str_copy_len, builtin_strncpy_read_str,
b412eb5b 5379 CONST_CAST (char *,
5380 TREE_STRING_POINTER (str)),
182cf5a9 5381 MEM_ALIGN (target), false,
5382 exp_len > str_copy_len ? 1 : 0);
5383 if (exp_len > str_copy_len)
5384 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5385 GEN_INT (exp_len - str_copy_len),
5386 BLOCK_OP_NORMAL);
5387 return NULL_RTX;
5388 }
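  /* (E.g., a sketch: for 'char buf[64] = "abc";' the string bytes are
     copied by pieces, with the count possibly rounded up to a
     store-by-pieces boundary, and clear_storage zeroes the remaining
     tail of the array.)  */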
10f307d9 5389 else
5390 {
5b5037b3 5391 rtx tmp_target;
5392
09879952 5393 normal_expr:
5b5037b3 5394 /* If we want to use a nontemporal store, force the value to
5395 register first. */
5396 tmp_target = nontemporal ? NULL_RTX : target;
5397 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
c0f85e83 5398 (call_param_p
60ffaf4d 5399 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
a12f023f 5400 &alt_rtl, false);
058a1b7a 5401
5402 /* Handle bounds returned by call. */
5403 if (TREE_CODE (exp) == CALL_EXPR)
5404 {
5405 rtx bounds;
5406 chkp_split_slot (temp, &temp, &bounds);
5407 if (bounds && btarget)
5408 {
5409 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5410 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5411 chkp_set_rtl_bounds (btarget, tmp);
5412 }
5413 }
10f307d9 5414 }
5415
c4050ce7 5416 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5417 the same as that of TARGET, adjust the constant. This is needed, for
5418 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5419 only a word-sized value. */
ceefa980 5420 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
43769aba 5421 && TREE_CODE (exp) != ERROR_MARK
ceefa980 5422 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5423 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
78a8ed03 5424 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
ceefa980 5425
10f307d9 5426 /* If value was not generated in the target, store it there.
c0d93299 5427 Convert the value to TARGET's type first if necessary and emit the
5428 pending incrementations that have been queued when expanding EXP.
5429 Note that we cannot emit the whole queue blindly because this will
5430 effectively disable the POST_INC optimization later.
5431
8a06f2d4 5432 If TEMP and TARGET compare equal according to rtx_equal_p, but
14e396bb 5433 one or both of them are volatile memory refs, we have to distinguish
5434 two cases:
5435 - expand_expr has used TARGET. In this case, we must not generate
 5436    another copy. This can be detected by TEMP being equal to TARGET
 5437    according to == .
5438 - expand_expr has not used TARGET - that means that the source just
5439 happens to have the same RTX form. Since temp will have been created
5440 by expand_expr, it will compare unequal according to == .
5441 We must generate a copy in this case, to reach the correct number
5442 of volatile memory references. */
10f307d9 5443
b1ba8c8b 5444 if ((! rtx_equal_p (temp, target)
14e396bb 5445 || (temp != target && (side_effects_p (temp)
5446 || side_effects_p (target))))
afadb0ab 5447 && TREE_CODE (exp) != ERROR_MARK
72a64688 5448 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5449 but TARGET is not valid memory reference, TEMP will differ
5450 from TARGET although it is really the same location. */
1e20370f 5451 && !(alt_rtl
5452 && rtx_equal_p (alt_rtl, target)
5453 && !side_effects_p (alt_rtl)
5454 && !side_effects_p (target))
89f18f73 5455 /* If there's nothing to copy, don't bother. Don't call
 5456    expr_size unless necessary, because some front-ends' (e.g. C++'s)
 5457    expr_size hook must not be given objects that are not
5458 supposed to be bit-copied or bit-initialized. */
d18d957a 5459 && expr_size (exp) != const0_rtx)
10f307d9 5460 {
7e91b548 5461 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
10f307d9 5462 {
7e91b548 5463 if (GET_MODE (target) == BLKmode)
5464 {
f955ca51 5465 /* Handle calls that return BLKmode values in registers. */
60797203 5466 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5467 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
7e91b548 5468 else
60797203 5469 store_bit_field (target,
5470 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5471 0, 0, 0, GET_MODE (temp), temp);
7e91b548 5472 }
10f307d9 5473 else
7e91b548 5474 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
10f307d9 5475 }
5476
5477 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5478 {
18279aee 5479 /* Handle copying a string constant into an array. The string
5480 constant may be shorter than the array. So copy just the string's
5481 actual length, and clear the rest. First get the size of the data
5482 type of the string, which is actually the size of the target. */
5483 rtx size = expr_size (exp);
10f307d9 5484
971ba038 5485 if (CONST_INT_P (size)
35f44ac1 5486 && INTVAL (size) < TREE_STRING_LENGTH (exp))
a35a63ff 5487 emit_block_move (target, temp, size,
c0f85e83 5488 (call_param_p
a35a63ff 5489 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 5490 else
10f307d9 5491 {
3754d046 5492 machine_mode pointer_mode
98155838 5493 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
3754d046 5494 machine_mode address_mode = get_address_mode (target);
98155838 5495
35f44ac1 5496 /* Compute the size of the data to copy from the string. */
5497 tree copy_size
389dd41b 5498 = size_binop_loc (loc, MIN_EXPR,
5499 make_tree (sizetype, size),
5500 size_int (TREE_STRING_LENGTH (exp)));
a35a63ff 5501 rtx copy_size_rtx
5502 = expand_expr (copy_size, NULL_RTX, VOIDmode,
c0f85e83 5503 (call_param_p
a35a63ff 5504 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
1d277a67 5505 rtx_code_label *label = 0;
35f44ac1 5506
5507 /* Copy that much. */
98155838 5508 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
78a8ed03 5509 TYPE_UNSIGNED (sizetype));
a35a63ff 5510 emit_block_move (target, temp, copy_size_rtx,
c0f85e83 5511 (call_param_p
a35a63ff 5512 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 5513
ed8d3eee 5514 /* Figure out how much is left in TARGET that we have to clear.
98155838 5515 Do all calculations in pointer_mode. */
971ba038 5516 if (CONST_INT_P (copy_size_rtx))
35f44ac1 5517 {
29c05e22 5518 size = plus_constant (address_mode, size,
5519 -INTVAL (copy_size_rtx));
18279aee 5520 target = adjust_address (target, BLKmode,
5521 INTVAL (copy_size_rtx));
35f44ac1 5522 }
5523 else
5524 {
4a836698 5525 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
b572011e 5526 copy_size_rtx, NULL_RTX, 0,
5527 OPTAB_LIB_WIDEN);
35f44ac1 5528
98155838 5529 if (GET_MODE (copy_size_rtx) != address_mode)
5530 copy_size_rtx = convert_to_mode (address_mode,
5531 copy_size_rtx,
78a8ed03 5532 TYPE_UNSIGNED (sizetype));
18279aee 5533
5534 target = offset_address (target, copy_size_rtx,
5535 highest_pow2_factor (copy_size));
35f44ac1 5536 label = gen_label_rtx ();
5a894bc6 5537 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
2b96c5f6 5538 GET_MODE (size), 0, label);
35f44ac1 5539 }
5540
5541 if (size != const0_rtx)
0b25db21 5542 clear_storage (target, size, BLOCK_OP_NORMAL);
bdf60b71 5543
35f44ac1 5544 if (label)
5545 emit_label (label);
10f307d9 5546 }
5547 }
ce739127 5548 /* Handle calls that return values in multiple non-contiguous locations.
5549 The Irix 6 ABI has examples of this. */
5550 else if (GET_CODE (target) == PARALLEL)
2d0fd66d 5551 {
5552 if (GET_CODE (temp) == PARALLEL)
5553 emit_group_move (target, temp);
5554 else
5555 emit_group_load (target, temp, TREE_TYPE (exp),
5556 int_size_in_bytes (TREE_TYPE (exp)));
5557 }
5558 else if (GET_CODE (temp) == PARALLEL)
5559 emit_group_store (target, temp, TREE_TYPE (exp),
5560 int_size_in_bytes (TREE_TYPE (exp)));
10f307d9 5561 else if (GET_MODE (temp) == BLKmode)
a35a63ff 5562 emit_block_move (target, temp, expr_size (exp),
c0f85e83 5563 (call_param_p
a35a63ff 5564 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
2d0fd66d 5565 /* If we emit a nontemporal store, there is nothing else to do. */
5566 else if (nontemporal && emit_storent_insn (target, temp))
5b5037b3 5567 ;
10f307d9 5568 else
828eae76 5569 {
5570 temp = force_operand (temp, target);
5571 if (temp != target)
5572 emit_move_insn (target, temp);
5573 }
10f307d9 5574 }
9282409c 5575
3f2a8027 5576 return NULL_RTX;
10f307d9 5577}
058a1b7a 5578
5579/* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5580rtx
5581store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5582{
5583 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5584}
10f307d9 5585\f
927b65fb 5586/* Return true if field F of structure TYPE is a flexible array. */
5587
5588static bool
5589flexible_array_member_p (const_tree f, const_tree type)
5590{
5591 const_tree tf;
5592
5593 tf = TREE_TYPE (f);
5594 return (DECL_CHAIN (f) == NULL
5595 && TREE_CODE (tf) == ARRAY_TYPE
5596 && TYPE_DOMAIN (tf)
5597 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5598 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5599 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5600 && int_size_in_bytes (type) >= 0);
5601}
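/* For example, DATA below is a flexible array member, which
   count_type_elements skips when counting constructor elements:

     struct s { int n; char data[]; };  */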
5602
5603/* If FOR_CTOR_P, return the number of top-level elements that a constructor
5604 must have in order for it to completely initialize a value of type TYPE.
5605 Return -1 if the number isn't known.
5606
5607 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5608
5609static HOST_WIDE_INT
5610count_type_elements (const_tree type, bool for_ctor_p)
5611{
5612 switch (TREE_CODE (type))
5613 {
5614 case ARRAY_TYPE:
5615 {
5616 tree nelts;
5617
5618 nelts = array_type_nelts (type);
e913b5cd 5619 if (nelts && tree_fits_uhwi_p (nelts))
927b65fb 5620 {
5621 unsigned HOST_WIDE_INT n;
5622
e913b5cd 5623 n = tree_to_uhwi (nelts) + 1;
927b65fb 5624 if (n == 0 || for_ctor_p)
5625 return n;
5626 else
5627 return n * count_type_elements (TREE_TYPE (type), false);
5628 }
5629 return for_ctor_p ? -1 : 1;
5630 }
5631
5632 case RECORD_TYPE:
5633 {
5634 unsigned HOST_WIDE_INT n;
5635 tree f;
5636
5637 n = 0;
5638 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5639 if (TREE_CODE (f) == FIELD_DECL)
5640 {
5641 if (!for_ctor_p)
5642 n += count_type_elements (TREE_TYPE (f), false);
5643 else if (!flexible_array_member_p (f, type))
5644 /* Don't count flexible arrays, which are not supposed
5645 to be initialized. */
5646 n += 1;
5647 }
5648
5649 return n;
5650 }
5651
5652 case UNION_TYPE:
5653 case QUAL_UNION_TYPE:
5654 {
5655 tree f;
5656 HOST_WIDE_INT n, m;
5657
5658 gcc_assert (!for_ctor_p);
5659 /* Estimate the number of scalars in each field and pick the
5660 maximum. Other estimates would do instead; the idea is simply
5661 to make sure that the estimate is not sensitive to the ordering
5662 of the fields. */
5663 n = 1;
5664 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5665 if (TREE_CODE (f) == FIELD_DECL)
5666 {
5667 m = count_type_elements (TREE_TYPE (f), false);
5668 /* If the field doesn't span the whole union, add an extra
5669 scalar for the rest. */
5670 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5671 TYPE_SIZE (type)) != 1)
5672 m++;
5673 if (n < m)
5674 n = m;
5675 }
5676 return n;
5677 }
5678
5679 case COMPLEX_TYPE:
5680 return 2;
5681
5682 case VECTOR_TYPE:
5683 return TYPE_VECTOR_SUBPARTS (type);
5684
5685 case INTEGER_TYPE:
5686 case REAL_TYPE:
5687 case FIXED_POINT_TYPE:
5688 case ENUMERAL_TYPE:
5689 case BOOLEAN_TYPE:
5690 case POINTER_TYPE:
5691 case OFFSET_TYPE:
5692 case REFERENCE_TYPE:
d965946e 5693 case NULLPTR_TYPE:
927b65fb 5694 return 1;
5695
5696 case ERROR_MARK:
5697 return 0;
5698
5699 case VOID_TYPE:
5700 case METHOD_TYPE:
5701 case FUNCTION_TYPE:
5702 case LANG_TYPE:
5703 default:
5704 gcc_unreachable ();
5705 }
5706}
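/* (Sketch: for 'struct s { int a; int b[3]; }' this returns 2 with
   FOR_CTOR_P -- two top-level fields -- and 4 without, as an estimate
   of the scalars contained in the type.)  */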
5707
20169a64 5708/* Helper for categorize_ctor_elements. Identical interface. */
dbd14dc5 5709
20169a64 5710static bool
b7bf20db 5711categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
927b65fb 5712 HOST_WIDE_INT *p_init_elts, bool *p_complete)
dbd14dc5 5713{
c75b4594 5714 unsigned HOST_WIDE_INT idx;
927b65fb 5715 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5716 tree value, purpose, elt_type;
dbd14dc5 5717
20169a64 5718 /* Whether CTOR is a valid constant initializer, in accordance with what
5719 initializer_constant_valid_p does. If inferred from the constructor
5720 elements, true until proven otherwise. */
5721 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5722 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5723
4ee9c684 5724 nz_elts = 0;
927b65fb 5725 init_elts = 0;
5726 num_fields = 0;
5727 elt_type = NULL_TREE;
491e04ef 5728
c75b4594 5729 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
dbd14dc5 5730 {
30d12889 5731 HOST_WIDE_INT mult = 1;
dbd14dc5 5732
0ff8139c 5733 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
4ee9c684 5734 {
5735 tree lo_index = TREE_OPERAND (purpose, 0);
5736 tree hi_index = TREE_OPERAND (purpose, 1);
dbd14dc5 5737
e913b5cd 5738 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5739 mult = (tree_to_uhwi (hi_index)
5740 - tree_to_uhwi (lo_index) + 1);
4ee9c684 5741 }
927b65fb 5742 num_fields += mult;
5743 elt_type = TREE_TYPE (value);
dbd14dc5 5744
4ee9c684 5745 switch (TREE_CODE (value))
5746 {
5747 case CONSTRUCTOR:
5748 {
20169a64 5749 HOST_WIDE_INT nz = 0, ic = 0;
1f8b6002 5750
927b65fb 5751 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5752 p_complete);
20169a64 5753
4ee9c684 5754 nz_elts += mult * nz;
927b65fb 5755 init_elts += mult * ic;
20169a64 5756
5757 if (const_from_elts_p && const_p)
5758 const_p = const_elt_p;
4ee9c684 5759 }
5760 break;
dbd14dc5 5761
4ee9c684 5762 case INTEGER_CST:
5763 case REAL_CST:
68a556d6 5764 case FIXED_CST:
4ee9c684 5765 if (!initializer_zerop (value))
5766 nz_elts += mult;
927b65fb 5767 init_elts += mult;
4ee9c684 5768 break;
839db04c 5769
5770 case STRING_CST:
5771 nz_elts += mult * TREE_STRING_LENGTH (value);
927b65fb 5772 init_elts += mult * TREE_STRING_LENGTH (value);
839db04c 5773 break;
5774
4ee9c684 5775 case COMPLEX_CST:
5776 if (!initializer_zerop (TREE_REALPART (value)))
5777 nz_elts += mult;
5778 if (!initializer_zerop (TREE_IMAGPART (value)))
5779 nz_elts += mult;
927b65fb 5780 init_elts += mult;
4ee9c684 5781 break;
839db04c 5782
4ee9c684 5783 case VECTOR_CST:
5784 {
fadf62f4 5785 unsigned i;
5786 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
42b74698 5787 {
fadf62f4 5788 tree v = VECTOR_CST_ELT (value, i);
5789 if (!initializer_zerop (v))
42b74698 5790 nz_elts += mult;
927b65fb 5791 init_elts += mult;
42b74698 5792 }
4ee9c684 5793 }
5794 break;
886cfd4f 5795
4ee9c684 5796 default:
30d12889 5797 {
927b65fb 5798 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
30d12889 5799 nz_elts += mult * tc;
927b65fb 5800 init_elts += mult * tc;
20169a64 5801
30d12889 5802 if (const_from_elts_p && const_p)
927b65fb 5803 const_p = initializer_constant_valid_p (value, elt_type)
30d12889 5804 != NULL_TREE;
5805 }
4ee9c684 5806 break;
5807 }
5808 }
886cfd4f 5809
927b65fb 5810 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5811 num_fields, elt_type))
5812 *p_complete = false;
7cb4a4d0 5813
4ee9c684 5814 *p_nz_elts += nz_elts;
927b65fb 5815 *p_init_elts += init_elts;
20169a64 5816
5817 return const_p;
4ee9c684 5818}
5819
20169a64 5820/* Examine CTOR to discover:
5821 * how many scalar fields are set to nonzero values,
5822 and place it in *P_NZ_ELTS;
 5823    * how many scalar fields in total are initialized in CTOR,
 5824      and place it in *P_INIT_ELTS;
927b65fb 5825 * whether the constructor is complete -- in the sense that every
5826 meaningful byte is explicitly given a value --
5827 and place it in *P_COMPLETE.
20169a64 5828
5829 Return whether or not CTOR is a valid static constant initializer, the same
5830 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5831
5832bool
b7bf20db 5833categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
927b65fb 5834 HOST_WIDE_INT *p_init_elts, bool *p_complete)
4ee9c684 5835{
5836 *p_nz_elts = 0;
927b65fb 5837 *p_init_elts = 0;
5838 *p_complete = true;
20169a64 5839
927b65fb 5840 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
4ee9c684 5841}
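/* (E.g., a sketch: for 'int a[8] = { 1, 0, 2 };', assuming the front
   end records all three explicit initializers, this sets *P_NZ_ELTS to
   2 and *P_INIT_ELTS to 3, and *P_COMPLETE becomes false because only
   three of the eight elements are given.)  */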
5842
927b65fb 5843/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5844 of which had type LAST_TYPE. Each element was itself a complete
5845 initializer, in the sense that every meaningful byte was explicitly
5846 given a value. Return true if the same is true for the constructor
5847 as a whole. */
4ee9c684 5848
927b65fb 5849bool
5850complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5851 const_tree last_type)
4ee9c684 5852{
927b65fb 5853 if (TREE_CODE (type) == UNION_TYPE
5854 || TREE_CODE (type) == QUAL_UNION_TYPE)
4ee9c684 5855 {
927b65fb 5856 if (num_elts == 0)
5857 return false;
fa56dc1d 5858
927b65fb 5859 gcc_assert (num_elts == 1 && last_type);
026a11f4 5860
927b65fb 5861 /* ??? We could look at each element of the union, and find the
 5862    largest element, which would avoid comparing the size of the
5863 initialized element against any tail padding in the union.
5864 Doesn't seem worth the effort... */
5865 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
dbd14dc5 5866 }
927b65fb 5867
5868 return count_type_elements (type, true) == num_elts;
dbd14dc5 5869}
5870
5871/* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5872
a9adb06f 5873static int
1f1872fd 5874mostly_zeros_p (const_tree exp)
dbd14dc5 5875{
dbd14dc5 5876 if (TREE_CODE (exp) == CONSTRUCTOR)
5877 {
927b65fb 5878 HOST_WIDE_INT nz_elts, init_elts;
5879 bool complete_p;
4ee9c684 5880
927b65fb 5881 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5882 return !complete_p || nz_elts < init_elts / 4;
dbd14dc5 5883 }
5884
4ee9c684 5885 return initializer_zerop (exp);
dbd14dc5 5886}
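/* (So, e.g., 'int a[8] = { 1 };' counts as mostly zeros: its
   constructor is incomplete, and store_constructor below will clear
   the whole array first and then store only the nonzero element.)  */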
c69ad7b2 5887
5888/* Return 1 if EXP contains all zeros. */
5889
5890static int
1f1872fd 5891all_zeros_p (const_tree exp)
c69ad7b2 5892{
5893 if (TREE_CODE (exp) == CONSTRUCTOR)
c69ad7b2 5894 {
927b65fb 5895 HOST_WIDE_INT nz_elts, init_elts;
5896 bool complete_p;
c69ad7b2 5897
927b65fb 5898 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
96d4b8c5 5899 return nz_elts == 0;
c69ad7b2 5900 }
5901
5902 return initializer_zerop (exp);
5903}
dbd14dc5 5904\f
e7ef3ff2 5905/* Helper function for store_constructor.
5906 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
2c269e73 5907 CLEARED is as for store_constructor.
1179a68b 5908 ALIAS_SET is the alias set to use for any stores.
a5b7fc8b 5909
5910 This provides a recursive shortcut back to store_constructor when it isn't
5911 necessary to go through store_field. This is so that we can pass through
5912 the cleared field to let store_constructor know that we may not have to
5913 clear a substructure if the outer structure has already been cleared. */
e7ef3ff2 5914
5915static void
35cb5232 5916store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
3754d046 5917 HOST_WIDE_INT bitpos, machine_mode mode,
f955ca51 5918 tree exp, int cleared, alias_set_type alias_set)
e7ef3ff2 5919{
5920 if (TREE_CODE (exp) == CONSTRUCTOR
a6645eae 5921 /* We can only call store_constructor recursively if the size and
5922 bit position are on a byte boundary. */
a5b7fc8b 5923 && bitpos % BITS_PER_UNIT == 0
a6645eae 5924 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6ef828f9 5925 /* If we have a nonzero bitpos for a register target, then we just
a5b7fc8b 5926 let store_field do the bitfield handling. This is unlikely to
 5927    generate unnecessary clear instructions anyway. */
e16ceb8e 5928 && (bitpos == 0 || MEM_P (target)))
e7ef3ff2 5929 {
e16ceb8e 5930 if (MEM_P (target))
459b8611 5931 target
5932 = adjust_address (target,
5933 GET_MODE (target) == BLKmode
5934 || 0 != (bitpos
5935 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5936 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
1179a68b 5937
5b90bb08 5938
2c269e73 5939 /* Update the alias set, if required. */
e16ceb8e 5940 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5cc193e7 5941 && MEM_ALIAS_SET (target) != 0)
86ce88aa 5942 {
5943 target = copy_rtx (target);
5944 set_mem_alias_set (target, alias_set);
5945 }
5b90bb08 5946
e792f237 5947 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e7ef3ff2 5948 }
5949 else
f955ca51 5950 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
e7ef3ff2 5951}
5952
0e80b01d 5953
5954/* Returns the number of FIELD_DECLs in TYPE. */
5955
5956static int
5957fields_length (const_tree type)
5958{
5959 tree t = TYPE_FIELDS (type);
5960 int count = 0;
5961
5962 for (; t; t = DECL_CHAIN (t))
5963 if (TREE_CODE (t) == FIELD_DECL)
5964 ++count;
5965
5966 return count;
5967}
5968
5969
10f307d9 5970/* Store the value of constructor EXP into the rtx TARGET.
2c269e73 5971 TARGET is either a REG or a MEM; we know it cannot conflict, since
5972 safe_from_p has been called.
e792f237 5973 CLEARED is true if TARGET is known to have been zero'd.
5974 SIZE is the number of bytes of TARGET we are allowed to modify: this
a316ea6a 5975 may not be the same as the size of EXP if we are assigning to a field
5976 which has been packed to exclude padding bits. */
10f307d9 5977
5978static void
35cb5232 5979store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
10f307d9 5980{
2ef1e405 5981 tree type = TREE_TYPE (exp);
0bf16c4a 5982#ifdef WORD_REGISTER_OPERATIONS
3a6656ad 5983 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
0bf16c4a 5984#endif
2ef1e405 5985
611234b4 5986 switch (TREE_CODE (type))
10f307d9 5987 {
611234b4 5988 case RECORD_TYPE:
5989 case UNION_TYPE:
5990 case QUAL_UNION_TYPE:
5991 {
c75b4594 5992 unsigned HOST_WIDE_INT idx;
5993 tree field, value;
dbd14dc5 5994
611234b4 5995 /* If size is zero or the target is already cleared, do nothing. */
5996 if (size == 0 || cleared)
dbd14dc5 5997 cleared = 1;
611234b4 5998 /* We either clear the aggregate or indicate the value is dead. */
5999 else if ((TREE_CODE (type) == UNION_TYPE
6000 || TREE_CODE (type) == QUAL_UNION_TYPE)
6001 && ! CONSTRUCTOR_ELTS (exp))
6002 /* If the constructor is empty, clear the union. */
6003 {
0b25db21 6004 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
611234b4 6005 cleared = 1;
6006 }
10f307d9 6007
611234b4 6008 /* If we are building a static constructor into a register,
6009 set the initial value as zero so we can fold the value into
6010 a constant. But if more than one register is involved,
6011 this probably loses. */
6012 else if (REG_P (target) && TREE_STATIC (exp)
6013 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6014 {
6015 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6016 cleared = 1;
6017 }
fa56dc1d 6018
611234b4 6019 /* If the constructor has fewer fields than the structure or
6020 if we are initializing the structure to mostly zeros, clear
6021 the whole structure first. Don't do this if TARGET is a
6022 register whose mode size isn't equal to SIZE since
6023 clear_storage can't handle this case. */
6024 else if (size > 0
f1f41a6c 6025 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
611234b4 6026 != fields_length (type))
6027 || mostly_zeros_p (exp))
6028 && (!REG_P (target)
6029 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6030 == size)))
6031 {
0b25db21 6032 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6033 cleared = 1;
6034 }
c869557a 6035
e8d1dcf2 6036 if (REG_P (target) && !cleared)
18b42941 6037 emit_clobber (target);
10f307d9 6038
611234b4 6039 /* Store each element of the constructor into the
6040 corresponding field of TARGET. */
c75b4594 6041 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
611234b4 6042 {
3754d046 6043 machine_mode mode;
611234b4 6044 HOST_WIDE_INT bitsize;
6045 HOST_WIDE_INT bitpos = 0;
6046 tree offset;
6047 rtx to_rtx = target;
1f8b6002 6048
611234b4 6049 /* Just ignore missing fields. We cleared the whole
6050 structure, above, if any fields are missing. */
6051 if (field == 0)
6052 continue;
1f8b6002 6053
611234b4 6054 if (cleared && initializer_zerop (value))
6055 continue;
1f8b6002 6056
e913b5cd 6057 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6058 bitsize = tree_to_uhwi (DECL_SIZE (field));
611234b4 6059 else
6060 bitsize = -1;
1f8b6002 6061
611234b4 6062 mode = DECL_MODE (field);
6063 if (DECL_BIT_FIELD (field))
6064 mode = VOIDmode;
1f8b6002 6065
611234b4 6066 offset = DECL_FIELD_OFFSET (field);
e913b5cd 6067 if (tree_fits_shwi_p (offset)
6068 && tree_fits_shwi_p (bit_position (field)))
611234b4 6069 {
6070 bitpos = int_bit_position (field);
6071 offset = 0;
6072 }
6073 else
e913b5cd 6074 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
1f8b6002 6075
611234b4 6076 if (offset)
6077 {
3754d046 6078 machine_mode address_mode;
611234b4 6079 rtx offset_rtx;
1f8b6002 6080
611234b4 6081 offset
6082 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6083 make_tree (TREE_TYPE (exp),
6084 target));
6085
8ec3c5c2 6086 offset_rtx = expand_normal (offset);
611234b4 6087 gcc_assert (MEM_P (to_rtx));
1f8b6002 6088
87cf5753 6089 address_mode = get_address_mode (to_rtx);
98155838 6090 if (GET_MODE (offset_rtx) != address_mode)
6091 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
33ef2f52 6092
611234b4 6093 to_rtx = offset_address (to_rtx, offset_rtx,
6094 highest_pow2_factor (offset));
6095 }
7014838c 6096
e6860d27 6097#ifdef WORD_REGISTER_OPERATIONS
611234b4 6098 /* If this initializes a field that is smaller than a
6099 word, at the start of a word, try to widen it to a full
6100 word. This special case allows us to output C++ member
6101 function initializations in a form that the optimizers
6102 can understand. */
6103 if (REG_P (target)
6104 && bitsize < BITS_PER_WORD
6105 && bitpos % BITS_PER_WORD == 0
6106 && GET_MODE_CLASS (mode) == MODE_INT
6107 && TREE_CODE (value) == INTEGER_CST
6108 && exp_size >= 0
6109 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6110 {
6111 tree type = TREE_TYPE (value);
1f8b6002 6112
611234b4 6113 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6114 {
1b3c3119 6115 type = lang_hooks.types.type_for_mode
6116 (word_mode, TYPE_UNSIGNED (type));
e3b560a6 6117 value = fold_convert (type, value);
611234b4 6118 }
1f8b6002 6119
611234b4 6120 if (BYTES_BIG_ENDIAN)
6121 value
faa43f85 6122 = fold_build2 (LSHIFT_EXPR, type, value,
e3b560a6 6123 build_int_cst (type,
faa43f85 6124 BITS_PER_WORD - bitsize));
611234b4 6125 bitsize = BITS_PER_WORD;
6126 mode = word_mode;
6127 }
e6860d27 6128#endif
5cc193e7 6129
611234b4 6130 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6131 && DECL_NONADDRESSABLE_P (field))
6132 {
6133 to_rtx = copy_rtx (to_rtx);
6134 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6135 }
1f8b6002 6136
611234b4 6137 store_constructor_field (to_rtx, bitsize, bitpos, mode,
f955ca51 6138 value, cleared,
611234b4 6139 get_alias_set (TREE_TYPE (field)));
6140 }
6141 break;
6142 }
6143 case ARRAY_TYPE:
6144 {
c75b4594 6145 tree value, index;
6146 unsigned HOST_WIDE_INT i;
611234b4 6147 int need_to_clear;
6148 tree domain;
6149 tree elttype = TREE_TYPE (type);
6150 int const_bounds_p;
6151 HOST_WIDE_INT minelt = 0;
6152 HOST_WIDE_INT maxelt = 0;
6153
6154 domain = TYPE_DOMAIN (type);
6155 const_bounds_p = (TYPE_MIN_VALUE (domain)
6156 && TYPE_MAX_VALUE (domain)
e913b5cd 6157 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6158 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
611234b4 6159
6160 /* If we have constant bounds for the range of the type, get them. */
6161 if (const_bounds_p)
6162 {
e913b5cd 6163 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6164 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
611234b4 6165 }
4418a1d4 6166
611234b4 6167 /* If the constructor has fewer elements than the array, clear
6168 the whole array first. Similarly if this is static
6169 constructor of a non-BLKmode object. */
6170 if (cleared)
6171 need_to_clear = 0;
6172 else if (REG_P (target) && TREE_STATIC (exp))
6173 need_to_clear = 1;
6174 else
6175 {
c75b4594 6176 unsigned HOST_WIDE_INT idx;
6177 tree index, value;
611234b4 6178 HOST_WIDE_INT count = 0, zero_count = 0;
6179 need_to_clear = ! const_bounds_p;
1f8b6002 6180
611234b4 6181 /* This loop is a more accurate version of the loop in
6182 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6183 is also needed to check for missing elements. */
c75b4594 6184 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
611234b4 6185 {
611234b4 6186 HOST_WIDE_INT this_node_count;
c75b4594 6187
6188 if (need_to_clear)
6189 break;
1f8b6002 6190
611234b4 6191 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6192 {
6193 tree lo_index = TREE_OPERAND (index, 0);
6194 tree hi_index = TREE_OPERAND (index, 1);
1f8b6002 6195
e913b5cd 6196 if (! tree_fits_uhwi_p (lo_index)
6197 || ! tree_fits_uhwi_p (hi_index))
611234b4 6198 {
6199 need_to_clear = 1;
6200 break;
6201 }
1f8b6002 6202
e913b5cd 6203 this_node_count = (tree_to_uhwi (hi_index)
6204 - tree_to_uhwi (lo_index) + 1);
611234b4 6205 }
6206 else
6207 this_node_count = 1;
1f8b6002 6208
611234b4 6209 count += this_node_count;
c75b4594 6210 if (mostly_zeros_p (value))
611234b4 6211 zero_count += this_node_count;
6212 }
1f8b6002 6213
611234b4 6214 /* Clear the entire array first if there are any missing
6215 elements, or if the incidence of zero elements is >=
6216 75%. */
6217 if (! need_to_clear
6218 && (count < maxelt - minelt + 1
6219 || 4 * zero_count >= 3 * count))
6220 need_to_clear = 1;
6221 }
1f8b6002 6222
611234b4 6223 if (need_to_clear && size > 0)
6224 {
6225 if (REG_P (target))
6226 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6227 else
0b25db21 6228 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6229 cleared = 1;
6230 }
4418a1d4 6231
611234b4 6232 if (!cleared && REG_P (target))
6233 /* Inform later passes that the old value is dead. */
18b42941 6234 emit_clobber (target);
4418a1d4 6235
611234b4 6236 /* Store each element of the constructor into the
6237 corresponding element of TARGET, determined by counting the
6238 elements. */
c75b4594 6239 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
611234b4 6240 {
3754d046 6241 machine_mode mode;
611234b4 6242 HOST_WIDE_INT bitsize;
6243 HOST_WIDE_INT bitpos;
611234b4 6244 rtx xtarget = target;
1f8b6002 6245
611234b4 6246 if (cleared && initializer_zerop (value))
6247 continue;
1f8b6002 6248
611234b4 6249 mode = TYPE_MODE (elttype);
6250 if (mode == BLKmode)
e913b5cd 6251 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6252 ? tree_to_uhwi (TYPE_SIZE (elttype))
611234b4 6253 : -1);
6254 else
6255 bitsize = GET_MODE_BITSIZE (mode);
1f8b6002 6256
611234b4 6257 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6258 {
6259 tree lo_index = TREE_OPERAND (index, 0);
6260 tree hi_index = TREE_OPERAND (index, 1);
6261 rtx index_r, pos_rtx;
6262 HOST_WIDE_INT lo, hi, count;
6263 tree position;
1f8b6002 6264
611234b4 6265 /* If the range is constant and "small", unroll the loop. */
6266 if (const_bounds_p
e913b5cd 6267 && tree_fits_shwi_p (lo_index)
6268 && tree_fits_shwi_p (hi_index)
6269 && (lo = tree_to_shwi (lo_index),
6270 hi = tree_to_shwi (hi_index),
611234b4 6271 count = hi - lo + 1,
6272 (!MEM_P (target)
6273 || count <= 2
e913b5cd 6274 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6275 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
611234b4 6276 <= 40 * 8)))))
6277 {
6278 lo -= minelt; hi -= minelt;
6279 for (; lo <= hi; lo++)
6280 {
e913b5cd 6281 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
1f8b6002 6282
611234b4 6283 if (MEM_P (target)
6284 && !MEM_KEEP_ALIAS_SET_P (target)
6285 && TREE_CODE (type) == ARRAY_TYPE
6286 && TYPE_NONALIASED_COMPONENT (type))
6287 {
6288 target = copy_rtx (target);
6289 MEM_KEEP_ALIAS_SET_P (target) = 1;
6290 }
1f8b6002 6291
611234b4 6292 store_constructor_field
f955ca51 6293 (target, bitsize, bitpos, mode, value, cleared,
611234b4 6294 get_alias_set (elttype));
6295 }
6296 }
6297 else
6298 {
1d277a67 6299 rtx_code_label *loop_start = gen_label_rtx ();
6300 rtx_code_label *loop_end = gen_label_rtx ();
611234b4 6301 tree exit_cond;
1f8b6002 6302
8ec3c5c2 6303 expand_normal (hi_index);
1f8b6002 6304
e60a6f7b 6305 index = build_decl (EXPR_LOCATION (exp),
6306 VAR_DECL, NULL_TREE, domain);
3b2411a8 6307 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
611234b4 6308 SET_DECL_RTL (index, index_r);
5b5037b3 6309 store_expr (lo_index, index_r, 0, false);
1f8b6002 6310
611234b4 6311 /* Build the head of the loop. */
6312 do_pending_stack_adjust ();
6313 emit_label (loop_start);
6314
6315 /* Assign value to element index. */
e3b560a6 6316 position =
6317 fold_convert (ssizetype,
6318 fold_build2 (MINUS_EXPR,
6319 TREE_TYPE (index),
6320 index,
6321 TYPE_MIN_VALUE (domain)));
6322
6323 position =
6324 size_binop (MULT_EXPR, position,
6325 fold_convert (ssizetype,
6326 TYPE_SIZE_UNIT (elttype)));
1f8b6002 6327
8ec3c5c2 6328 pos_rtx = expand_normal (position);
611234b4 6329 xtarget = offset_address (target, pos_rtx,
6330 highest_pow2_factor (position));
6331 xtarget = adjust_address (xtarget, mode, 0);
6332 if (TREE_CODE (value) == CONSTRUCTOR)
6333 store_constructor (value, xtarget, cleared,
6334 bitsize / BITS_PER_UNIT);
6335 else
5b5037b3 6336 store_expr (value, xtarget, 0, false);
611234b4 6337
6338 /* Generate a conditional jump to exit the loop. */
6339 exit_cond = build2 (LT_EXPR, integer_type_node,
6340 index, hi_index);
79ab74cc 6341 jumpif (exit_cond, loop_end, -1);
1f8b6002 6342
611234b4 6343 /* Update the loop counter, and jump to the head of
6344 the loop. */
6345 expand_assignment (index,
6346 build2 (PLUS_EXPR, TREE_TYPE (index),
5b5037b3 6347 index, integer_one_node),
6348 false);
1f8b6002 6349
611234b4 6350 emit_jump (loop_start);
1f8b6002 6351
611234b4 6352 /* Build the end of the loop. */
6353 emit_label (loop_end);
6354 }
6355 }
e913b5cd 6356 else if ((index != 0 && ! tree_fits_shwi_p (index))
6357 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
611234b4 6358 {
6359 tree position;
1f8b6002 6360
611234b4 6361 if (index == 0)
6362 index = ssize_int (1);
1f8b6002 6363
611234b4 6364 if (minelt)
6365 index = fold_convert (ssizetype,
faa43f85 6366 fold_build2 (MINUS_EXPR,
6367 TREE_TYPE (index),
6368 index,
6369 TYPE_MIN_VALUE (domain)));
1f8b6002 6370
e3b560a6 6371 position =
6372 size_binop (MULT_EXPR, index,
6373 fold_convert (ssizetype,
6374 TYPE_SIZE_UNIT (elttype)));
611234b4 6375 xtarget = offset_address (target,
8ec3c5c2 6376 expand_normal (position),
611234b4 6377 highest_pow2_factor (position));
6378 xtarget = adjust_address (xtarget, mode, 0);
5b5037b3 6379 store_expr (value, xtarget, 0, false);
611234b4 6380 }
6381 else
6382 {
6383 if (index != 0)
e913b5cd 6384 bitpos = ((tree_to_shwi (index) - minelt)
6385 * tree_to_uhwi (TYPE_SIZE (elttype)));
611234b4 6386 else
e913b5cd 6387 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
1f8b6002 6388
611234b4 6389 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6390 && TREE_CODE (type) == ARRAY_TYPE
6391 && TYPE_NONALIASED_COMPONENT (type))
6392 {
6393 target = copy_rtx (target);
6394 MEM_KEEP_ALIAS_SET_P (target) = 1;
6395 }
6396 store_constructor_field (target, bitsize, bitpos, mode, value,
f955ca51 6397 cleared, get_alias_set (elttype));
611234b4 6398 }
6399 }
6400 break;
6401 }
4418a1d4 6402
611234b4 6403 case VECTOR_TYPE:
6404 {
c75b4594 6405 unsigned HOST_WIDE_INT idx;
6406 constructor_elt *ce;
611234b4 6407 int i;
6408 int need_to_clear;
d386876e 6409 int icode = CODE_FOR_nothing;
611234b4 6410 tree elttype = TREE_TYPE (type);
e913b5cd 6411 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
3754d046 6412 machine_mode eltmode = TYPE_MODE (elttype);
611234b4 6413 HOST_WIDE_INT bitsize;
6414 HOST_WIDE_INT bitpos;
9c1b832c 6415 rtvec vector = NULL;
611234b4 6416 unsigned n_elts;
4eaf1e94 6417 alias_set_type alias;
1f8b6002 6418
611234b4 6419 gcc_assert (eltmode != BLKmode);
1f8b6002 6420
611234b4 6421 n_elts = TYPE_VECTOR_SUBPARTS (type);
6422 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6423 {
3754d046 6424 machine_mode mode = GET_MODE (target);
1f8b6002 6425
d6bf3b14 6426 icode = (int) optab_handler (vec_init_optab, mode);
57b5438a 6427 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6428 if (icode != CODE_FOR_nothing)
6429 {
6430 tree value;
6431
6432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6433 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6434 {
6435 icode = CODE_FOR_nothing;
6436 break;
6437 }
6438 }
611234b4 6439 if (icode != CODE_FOR_nothing)
6440 {
6441 unsigned int i;
1f8b6002 6442
9c1b832c 6443 vector = rtvec_alloc (n_elts);
611234b4 6444 for (i = 0; i < n_elts; i++)
9c1b832c 6445 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
611234b4 6446 }
6447 }
1f8b6002 6448
611234b4 6449 /* If the constructor has fewer elements than the vector,
 6450	 clear the whole array first.  Similarly if this is a static
 6451	 constructor of a non-BLKmode object. */
6452 if (cleared)
6453 need_to_clear = 0;
6454 else if (REG_P (target) && TREE_STATIC (exp))
6455 need_to_clear = 1;
6456 else
6457 {
6458 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
c75b4594 6459 tree value;
1f8b6002 6460
c75b4594 6461 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
611234b4 6462 {
e913b5cd 6463 int n_elts_here = tree_to_uhwi
611234b4 6464 (int_const_binop (TRUNC_DIV_EXPR,
c75b4594 6465 TYPE_SIZE (TREE_TYPE (value)),
e913b5cd 6466 TYPE_SIZE (elttype)));
1f8b6002 6467
611234b4 6468 count += n_elts_here;
c75b4594 6469 if (mostly_zeros_p (value))
611234b4 6470 zero_count += n_elts_here;
6471 }
4418a1d4 6472
611234b4 6473 /* Clear the entire vector first if there are any missing elements,
6474 or if the incidence of zero elements is >= 75%. */
6475 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6476 }
1f8b6002 6477
611234b4 6478 if (need_to_clear && size > 0 && !vector)
6479 {
6480 if (REG_P (target))
4eaf1e94 6481 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
611234b4 6482 else
0b25db21 6483 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
611234b4 6484 cleared = 1;
6485 }
1f8b6002 6486
49f312aa 6487 /* Inform later passes that the old value is dead. */
1abf6b04 6488 if (!cleared && !vector && REG_P (target))
49f312aa 6489 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
611234b4 6490
4eaf1e94 6491 if (MEM_P (target))
6492 alias = MEM_ALIAS_SET (target);
6493 else
6494 alias = get_alias_set (elttype);
6495
611234b4 6496 /* Store each element of the constructor into the corresponding
6497 element of TARGET, determined by counting the elements. */
c75b4594 6498 for (idx = 0, i = 0;
f1f41a6c 6499 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
c75b4594 6500 idx++, i += bitsize / elt_size)
611234b4 6501 {
611234b4 6502 HOST_WIDE_INT eltpos;
c75b4594 6503 tree value = ce->value;
1f8b6002 6504
e913b5cd 6505 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
611234b4 6506 if (cleared && initializer_zerop (value))
6507 continue;
1f8b6002 6508
c75b4594 6509 if (ce->index)
e913b5cd 6510 eltpos = tree_to_uhwi (ce->index);
611234b4 6511 else
6512 eltpos = i;
1f8b6002 6513
611234b4 6514 if (vector)
6515 {
57b5438a 6516 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6517 elements. */
611234b4 6518 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
9c1b832c 6519 RTVEC_ELT (vector, eltpos)
8ec3c5c2 6520 = expand_normal (value);
611234b4 6521 }
6522 else
6523 {
3754d046 6524 machine_mode value_mode =
611234b4 6525 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4418a1d4 6526 ? TYPE_MODE (TREE_TYPE (value))
6527 : eltmode;
611234b4 6528 bitpos = eltpos * elt_size;
f955ca51 6529 store_constructor_field (target, bitsize, bitpos, value_mode,
6530 value, cleared, alias);
611234b4 6531 }
6532 }
1f8b6002 6533
611234b4 6534 if (vector)
6535 emit_insn (GEN_FCN (icode)
6536 (target,
9c1b832c 6537 gen_rtx_PARALLEL (GET_MODE (target), vector)));
611234b4 6538 break;
6539 }
1f8b6002 6540
611234b4 6541 default:
6542 gcc_unreachable ();
97b2af42 6543 }
10f307d9 6544}
6545
6546/* Store the value of EXP (an expression tree)
6547 into a subfield of TARGET which has mode MODE and occupies
6548 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6549 If MODE is VOIDmode, it means that we are storing into a bit-field.
6550
4bb60ec7 6551   BITREGION_START is the bitpos of the first bitfield in this region.
6552 BITREGION_END is the bitpos of the ending bitfield in this region.
 6553   These two fields are 0 if the C++ memory model does not apply,
6554 or we are not interested in keeping track of bitfield regions.
6555
58f9138c 6556 Always return const0_rtx unless we have something particular to
6557 return.
10f307d9 6558
1e2513d9 6559 ALIAS_SET is the alias set for the destination. This value will
6560 (in general) be different from that for TARGET, since TARGET is a
5b5037b3 6561 reference to the containing structure.
48e1416a 6562
5b5037b3 6563 If NONTEMPORAL is true, try generating a nontemporal store. */
10f307d9 6564
6565static rtx
35cb5232 6566store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
4bb60ec7 6567 unsigned HOST_WIDE_INT bitregion_start,
6568 unsigned HOST_WIDE_INT bitregion_end,
3754d046 6569 machine_mode mode, tree exp,
32c2fdea 6570 alias_set_type alias_set, bool nontemporal)
10f307d9 6571{
0dbd1c74 6572 if (TREE_CODE (exp) == ERROR_MARK)
6573 return const0_rtx;
6574
55e9836d 6575 /* If we have nothing to store, do nothing unless the expression has
6576 side-effects. */
6577 if (bitsize == 0)
1db6d067 6578 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
10f307d9 6579
efd3939c 6580 if (GET_CODE (target) == CONCAT)
6581 {
6582 /* We're storing into a struct containing a single __complex. */
6583
611234b4 6584 gcc_assert (!bitpos);
5b5037b3 6585 return store_expr (exp, target, 0, nontemporal);
efd3939c 6586 }
10f307d9 6587
6588 /* If the structure is in a register or if the component
6589 is a bit field, we cannot use addressing to access it.
6590 Use bit-field techniques or SUBREG to store in it. */
6591
07edfa02 6592 if (mode == VOIDmode
03519f22 6593 || (mode != BLKmode && ! direct_store[(int) mode]
6594 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6595 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
8ad4c111 6596 || REG_P (target)
66aa258b 6597 || GET_CODE (target) == SUBREG
4e05e574 6598 /* If the field isn't aligned enough to store as an ordinary memref,
6599 store it as a bit field. */
9a0db358 6600 || (mode != BLKmode
8f6f6bc8 6601 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6602 || bitpos % GET_MODE_ALIGNMENT (mode))
6603 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
35cb5232 6604 || (bitpos % BITS_PER_UNIT != 0)))
941a2396 6605 || (bitsize >= 0 && mode != BLKmode
6606 && GET_MODE_BITSIZE (mode) > bitsize)
155b05dc 6607 /* If the RHS and field are a constant size and the size of the
6608 RHS isn't the same size as the bitfield, we must use bitfield
6609 operations. */
a0c2c45b 6610 || (bitsize >= 0
6611 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
182cf5a9 6612 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6613 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6614 decl we must use bitfield operations. */
6615 || (bitsize >= 0
6616 && TREE_CODE (exp) == MEM_REF
6617 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6618 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6619 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6620 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
10f307d9 6621 {
24bdc387 6622 rtx temp;
c1a83279 6623 gimple nop_def;
24bdc387 6624
6625 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6626 implies a mask operation. If the precision is the same size as
6627 the field we're storing into, that mask is redundant. This is
6628 particularly common with bit field assignments generated by the
6629 C front end. */
c1a83279 6630 nop_def = get_def_for_expr (exp, NOP_EXPR);
6631 if (nop_def)
60fb4601 6632 {
6633 tree type = TREE_TYPE (exp);
6634 if (INTEGRAL_TYPE_P (type)
6635 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6636 && bitsize == TYPE_PRECISION (type))
6637 {
c1a83279 6638 tree op = gimple_assign_rhs1 (nop_def);
6639 type = TREE_TYPE (op);
60fb4601 6640 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
c1a83279 6641 exp = op;
60fb4601 6642 }
6643 }
24bdc387 6644
8ec3c5c2 6645 temp = expand_normal (exp);
97d7f645 6646
0aa5cbcc 6647 /* If BITSIZE is narrower than the size of the type of EXP
6648 we will be narrowing TEMP. Normally, what's wanted are the
 6649	 low-order bits.  However, if EXP's type is a record and this is a
 6650	 big-endian machine, we want the upper BITSIZE bits. */
6651 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
cce8da2f 6652 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
0aa5cbcc 6653 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6654 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
f5ff0b21 6655 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
8a348c93 6656 NULL_RTX, 1);
0aa5cbcc 6657
40715742 6658 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
97d7f645 6659 if (mode != VOIDmode && mode != BLKmode
6660 && mode != TYPE_MODE (TREE_TYPE (exp)))
6661 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6662
7081e928 6663 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6664 are both BLKmode, both must be in memory and BITPOS must be aligned
6665 on a byte boundary. If so, we simply do a block copy. Likewise for
6666 a BLKmode-like TARGET. */
6667 if (GET_CODE (temp) != PARALLEL
6668 && GET_MODE (temp) == BLKmode
3bfa8ada 6669 && (GET_MODE (target) == BLKmode
6670 || (MEM_P (target)
6671 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6672 && (bitpos % BITS_PER_UNIT) == 0
6673 && (bitsize % BITS_PER_UNIT) == 0)))
0e20f9fb 6674 {
611234b4 6675 gcc_assert (MEM_P (target) && MEM_P (temp)
3bfa8ada 6676 && (bitpos % BITS_PER_UNIT) == 0);
0e20f9fb 6677
e513d163 6678 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
0e20f9fb 6679 emit_block_move (target, temp,
2b96c5f6 6680 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
0378dbdc 6681 / BITS_PER_UNIT),
6682 BLOCK_OP_NORMAL);
0e20f9fb 6683
58f9138c 6684 return const0_rtx;
0e20f9fb 6685 }
6686
2d0fd66d 6687 /* Handle calls that return values in multiple non-contiguous locations.
6688 The Irix 6 ABI has examples of this. */
61c39547 6689 if (GET_CODE (temp) == PARALLEL)
6690 {
f955ca51 6691 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
61c39547 6692 rtx temp_target;
e0fb89d5 6693 if (mode == BLKmode || mode == VOIDmode)
f955ca51 6694 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6695 temp_target = gen_reg_rtx (mode);
6696 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
61c39547 6697 temp = temp_target;
6698 }
f955ca51 6699 else if (mode == BLKmode)
7e91b548 6700 {
f955ca51 6701 /* Handle calls that return BLKmode values in registers. */
6702 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6703 {
6704 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6705 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6706 temp = temp_target;
6707 }
6708 else
6709 {
6710 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6711 rtx temp_target;
6712 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6713 temp_target = gen_reg_rtx (mode);
6714 temp_target
6715 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
3f71db40 6716 temp_target, mode, mode);
f955ca51 6717 temp = temp_target;
6718 }
7e91b548 6719 }
6720
61c39547 6721 /* Store the value in the bitfield. */
6722 store_bit_field (target, bitsize, bitpos,
6723 bitregion_start, bitregion_end,
6724 mode, temp);
2b96c5f6 6725
10f307d9 6726 return const0_rtx;
6727 }
6728 else
6729 {
10f307d9 6730 /* Now build a reference to just the desired component. */
58f9138c 6731 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
2b96c5f6 6732
6733 if (to_rtx == target)
6734 to_rtx = copy_rtx (to_rtx);
537ffcfc 6735
5cc193e7 6736 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
2b96c5f6 6737 set_mem_alias_set (to_rtx, alias_set);
10f307d9 6738
5b5037b3 6739 return store_expr (exp, to_rtx, 0, nontemporal);
10f307d9 6740 }
6741}
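/* Usage sketch (an addition; TO_RTX and RHS are hypothetical): a caller
   that has decomposed an lvalue into a byte-aligned 16-bit field at bit
   offset 8 of TO_RTX could store RHS with

     store_field (to_rtx, 16, 8, 0, 0, HImode, rhs,
		  get_alias_set (TREE_TYPE (rhs)), false);

   passing 0 for both bitregion bounds since no C++ memory model region
   is being tracked, as documented above.  */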
6742\f
6743/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
ba04d9d5 6744 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6745 codes and find the ultimate containing object, which we return.
10f307d9 6746
6747 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6748 bit position, and *PUNSIGNEDP to the signedness of the field.
954bdcb1 6749 If the position of the field is variable, we store a tree
6750 giving the variable offset (in units) in *POFFSET.
6751 This offset is in addition to the bit position.
6752 If the position is not variable, we store 0 in *POFFSET.
10f307d9 6753
6754 If any of the extraction expressions is volatile,
6755 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6756
3bfa8ada 6757 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6758 Otherwise, it is a mode that can be used to access the field.
01ab6370 6759
6760 If the field describes a variable-sized object, *PMODE is set to
3bfa8ada 6761 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
e7e9416e 6762 this case, but the address of the object can be found.
6763
6764 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6765 look through nodes that serve as markers of a greater alignment than
6766 the one that can be deduced from the expression. These nodes make it
6767 possible for front-ends to prevent temporaries from being created by
6768 the middle-end on alignment considerations. For that purpose, the
6769 normal operating mode at high-level is to always pass FALSE so that
6770 the ultimate containing object is really returned; moreover, the
6771 associated predicate handled_component_p will always return TRUE
6772 on these nodes, thus indicating that they are essentially handled
6773 by get_inner_reference. TRUE should only be passed when the caller
6774 is scanning the expression in order to build another representation
6775 and specifically knows how to handle these nodes; as such, this is
6776 the normal operating mode in the RTL expanders. */
10f307d9 6777
6778tree
35cb5232 6779get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6780 HOST_WIDE_INT *pbitpos, tree *poffset,
3754d046 6781 machine_mode *pmode, int *punsignedp,
e7e9416e 6782 int *pvolatilep, bool keep_aligning)
10f307d9 6783{
6784 tree size_tree = 0;
3754d046 6785 machine_mode mode = VOIDmode;
3bfa8ada 6786 bool blkmode_bitfield = false;
902de8ed 6787 tree offset = size_zero_node;
5de9d3ed 6788 offset_int bit_offset = 0;
10f307d9 6789
02e7a332 6790 /* First get the mode, signedness, and size. We do this from just the
6791 outermost expression. */
b21392bb 6792 *pbitsize = -1;
10f307d9 6793 if (TREE_CODE (exp) == COMPONENT_REF)
6794 {
3bfa8ada 6795 tree field = TREE_OPERAND (exp, 1);
6796 size_tree = DECL_SIZE (field);
7691c4ce 6797 if (flag_strict_volatile_bitfields > 0
6798 && TREE_THIS_VOLATILE (exp)
6799 && DECL_BIT_FIELD_TYPE (field)
6800 && DECL_MODE (field) != BLKmode)
a420d927 6801 /* Volatile bitfields should be accessed in the mode of the
6802 field's type, not the mode computed based on the bit
6803 size. */
6804 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7691c4ce 6805 else if (!DECL_BIT_FIELD (field))
6806 mode = DECL_MODE (field);
6807 else if (DECL_MODE (field) == BLKmode)
6808 blkmode_bitfield = true;
3bfa8ada 6809
6810 *punsignedp = DECL_UNSIGNED (field);
10f307d9 6811 }
6812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6813 {
6814 size_tree = TREE_OPERAND (exp, 1);
70337474 6815 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6816 || TYPE_UNSIGNED (TREE_TYPE (exp)));
1f8b6002 6817
8ea8de24 6818 /* For vector types, with the correct size of access, use the mode of
 6819	 the inner type. */
6820 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6821 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6822 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6823 mode = TYPE_MODE (TREE_TYPE (exp));
10f307d9 6824 }
6825 else
6826 {
6827 mode = TYPE_MODE (TREE_TYPE (exp));
78a8ed03 6828 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
02e7a332 6829
be2828ce 6830 if (mode == BLKmode)
6831 size_tree = TYPE_SIZE (TREE_TYPE (exp));
02e7a332 6832 else
6833 *pbitsize = GET_MODE_BITSIZE (mode);
10f307d9 6834 }
fa56dc1d 6835
02e7a332 6836 if (size_tree != 0)
10f307d9 6837 {
e913b5cd 6838 if (! tree_fits_uhwi_p (size_tree))
01ab6370 6839 mode = BLKmode, *pbitsize = -1;
6840 else
e913b5cd 6841 *pbitsize = tree_to_uhwi (size_tree);
10f307d9 6842 }
6843
6844 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6845 and find the ultimate containing object. */
10f307d9 6846 while (1)
6847 {
1f9b622b 6848 switch (TREE_CODE (exp))
10f307d9 6849 {
1f9b622b 6850 case BIT_FIELD_REF:
5de9d3ed 6851 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
1f9b622b 6852 break;
10f307d9 6853
1f9b622b 6854 case COMPONENT_REF:
6855 {
6856 tree field = TREE_OPERAND (exp, 1);
6857 tree this_offset = component_ref_field_offset (exp);
227bf826 6858
1f9b622b 6859 /* If this field hasn't been filled in yet, don't go past it.
6860 This should only happen when folding expressions made during
6861 type construction. */
6862 if (this_offset == 0)
6863 break;
75f7b24f 6864
1f9b622b 6865 offset = size_binop (PLUS_EXPR, offset, this_offset);
5de9d3ed 6866 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
7114c815 6867
1f9b622b 6868 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6869 }
6870 break;
7114c815 6871
1f9b622b 6872 case ARRAY_REF:
6873 case ARRAY_RANGE_REF:
6874 {
6875 tree index = TREE_OPERAND (exp, 1);
6876 tree low_bound = array_ref_low_bound (exp);
6877 tree unit_size = array_ref_element_size (exp);
6878
6879 /* We assume all arrays have sizes that are a multiple of a byte.
6880 First subtract the lower bound, if any, in the type of the
6881 index, then convert to sizetype and multiply by the size of
6882 the array element. */
6883 if (! integer_zerop (low_bound))
faa43f85 6884 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6885 index, low_bound);
1f9b622b 6886
6887 offset = size_binop (PLUS_EXPR, offset,
6888 size_binop (MULT_EXPR,
e3b560a6 6889 fold_convert (sizetype, index),
1f9b622b 6890 unit_size));
6891 }
6892 break;
6893
6894 case REALPART_EXPR:
1f9b622b 6895 break;
6896
6897 case IMAGPART_EXPR:
e913b5cd 6898 bit_offset += *pbitsize;
1f9b622b 6899 break;
6900
1f9b622b 6901 case VIEW_CONVERT_EXPR:
e7e9416e 6902 if (keep_aligning && STRICT_ALIGNMENT
6903 && (TYPE_ALIGN (TREE_TYPE (exp))
1f9b622b 6904 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
1f9b622b 6905 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6906 < BIGGEST_ALIGNMENT)
6907 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6908 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6909 goto done;
6910 break;
6911
182cf5a9 6912 case MEM_REF:
6913 /* Hand back the decl for MEM[&decl, off]. */
6914 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6915 {
6916 tree off = TREE_OPERAND (exp, 1);
6917 if (!integer_zerop (off))
6918 {
5de9d3ed 6919 offset_int boff, coff = mem_ref_offset (exp);
885a2694 6920 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
cf8f0e63 6921 bit_offset += boff;
182cf5a9 6922 }
6923 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6924 }
6925 goto done;
6926
1f9b622b 6927 default:
6928 goto done;
6929 }
954bdcb1 6930
6931 /* If any reference in the chain is volatile, the effect is volatile. */
6932 if (TREE_THIS_VOLATILE (exp))
6933 *pvolatilep = 1;
7fce34be 6934
10f307d9 6935 exp = TREE_OPERAND (exp, 0);
6936 }
1f9b622b 6937 done:
10f307d9 6938
02e7a332 6939 /* If OFFSET is constant, see if we can return the whole thing as a
85a32bdb 6940 constant bit position. Make sure to handle overflow during
6941 this conversion. */
2ad5f5fc 6942 if (TREE_CODE (offset) == INTEGER_CST)
6943 {
5de9d3ed 6944 offset_int tem = wi::sext (wi::to_offset (offset),
6945 TYPE_PRECISION (sizetype));
885a2694 6946 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
cf8f0e63 6947 tem += bit_offset;
796b6678 6948 if (wi::fits_shwi_p (tem))
cf8f0e63 6949 {
6950 *pbitpos = tem.to_shwi ();
3bfa8ada 6951 *poffset = offset = NULL_TREE;
85a32bdb 6952 }
6953 }
6954
6955 /* Otherwise, split it up. */
3bfa8ada 6956 if (offset)
6957 {
476e59ce 6958 /* Avoid returning a negative bitpos as this may wreak havoc later. */
f7572df2 6959 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
476e59ce 6960 {
885a2694 6961 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
5de9d3ed 6962 offset_int tem = bit_offset.and_not (mask);
476e59ce 6963 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 6964	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
cf8f0e63 6965 bit_offset -= tem;
885a2694 6966 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
476e59ce 6967 offset = size_binop (PLUS_EXPR, offset,
e913b5cd 6968 wide_int_to_tree (sizetype, tem));
476e59ce 6969 }
6970
cf8f0e63 6971 *pbitpos = bit_offset.to_shwi ();
3bfa8ada 6972 *poffset = offset;
6973 }
6974
6975 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6976 if (mode == VOIDmode
6977 && blkmode_bitfield
6978 && (*pbitpos % BITS_PER_UNIT) == 0
6979 && (*pbitsize % BITS_PER_UNIT) == 0)
6980 *pmode = BLKmode;
6981 else
6982 *pmode = mode;
c869557a 6983
10f307d9 6984 return exp;
6985}
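/* Usage sketch (an addition; REF is a hypothetical reference tree):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep,
				      false);

   For a COMPONENT_REF such as s.f, with f a 3-bit field at constant bit
   position 5, this hands back s with *PBITSIZE == 3, *PBITPOS == 5 and
   *POFFSET == NULL_TREE, since no variable offset is involved.  */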
eb4b06b6 6986
6cbeacbb 6987/* Alignment in bits the TARGET of an assignment may be assumed to have. */
6988
6989static unsigned HOST_WIDE_INT
6990target_align (const_tree target)
6991{
6992 /* We might have a chain of nested references with intermediate misaligning
6993 bitfields components, so need to recurse to find out. */
6994
6995 unsigned HOST_WIDE_INT this_align, outer_align;
6996
6997 switch (TREE_CODE (target))
6998 {
6999 case BIT_FIELD_REF:
7000 return 1;
7001
7002 case COMPONENT_REF:
7003 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7004 outer_align = target_align (TREE_OPERAND (target, 0));
7005 return MIN (this_align, outer_align);
7006
7007 case ARRAY_REF:
7008 case ARRAY_RANGE_REF:
7009 this_align = TYPE_ALIGN (TREE_TYPE (target));
7010 outer_align = target_align (TREE_OPERAND (target, 0));
7011 return MIN (this_align, outer_align);
7012
7013 CASE_CONVERT:
7014 case NON_LVALUE_EXPR:
7015 case VIEW_CONVERT_EXPR:
7016 this_align = TYPE_ALIGN (TREE_TYPE (target));
7017 outer_align = target_align (TREE_OPERAND (target, 0));
7018 return MAX (this_align, outer_align);
7019
7020 default:
7021 return TYPE_ALIGN (TREE_TYPE (target));
7022 }
7023}
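/* Added example: for a target such as a.b[i], the ARRAY_REF case yields
   MIN (TYPE_ALIGN of the element type, target_align (a.b)), so a single
   under-aligned component anywhere in the chain caps the alignment
   assumed for the whole access.  */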
7024
10f307d9 7025\f
dc183975 7026/* Given an rtx VALUE that may contain additions and multiplications, return
7027 an equivalent value that just refers to a register, memory, or constant.
7028 This is done by generating instructions to perform the arithmetic and
7029 returning a pseudo-register containing the value.
c4f1a887 7030
7031 The returned value may be a REG, SUBREG, MEM or constant. */
10f307d9 7032
7033rtx
35cb5232 7034force_operand (rtx value, rtx target)
10f307d9 7035{
fef8467d 7036 rtx op1, op2;
10f307d9 7037 /* Use subtarget as the target for operand 0 of a binary operation. */
19cb6b50 7038 rtx subtarget = get_subtarget (target);
fef8467d 7039 enum rtx_code code = GET_CODE (value);
10f307d9 7040
f9cce2dc 7041 /* Check for subreg applied to an expression produced by loop optimizer. */
7042 if (code == SUBREG
8ad4c111 7043 && !REG_P (SUBREG_REG (value))
e16ceb8e 7044 && !MEM_P (SUBREG_REG (value)))
f9cce2dc 7045 {
4631d202 7046 value
7047 = simplify_gen_subreg (GET_MODE (value),
7048 force_reg (GET_MODE (SUBREG_REG (value)),
7049 force_operand (SUBREG_REG (value),
7050 NULL_RTX)),
7051 GET_MODE (SUBREG_REG (value)),
7052 SUBREG_BYTE (value));
f9cce2dc 7053 code = GET_CODE (value);
7054 }
7055
8b59469a 7056 /* Check for a PIC address load. */
fef8467d 7057 if ((code == PLUS || code == MINUS)
8b59469a 7058 && XEXP (value, 0) == pic_offset_table_rtx
7059 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7060 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7061 || GET_CODE (XEXP (value, 1)) == CONST))
7062 {
7063 if (!subtarget)
7064 subtarget = gen_reg_rtx (GET_MODE (value));
7065 emit_move_insn (subtarget, value);
7066 return subtarget;
7067 }
7068
6720e96c 7069 if (ARITHMETIC_P (value))
10f307d9 7070 {
7071 op2 = XEXP (value, 1);
8ad4c111 7072 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
10f307d9 7073 subtarget = 0;
971ba038 7074 if (code == MINUS && CONST_INT_P (op2))
10f307d9 7075 {
fef8467d 7076 code = PLUS;
10f307d9 7077 op2 = negate_rtx (GET_MODE (value), op2);
7078 }
7079
7080 /* Check for an addition with OP2 a constant integer and our first
fef8467d 7081 operand a PLUS of a virtual register and something else. In that
7082 case, we want to emit the sum of the virtual register and the
7083 constant first and then add the other value. This allows virtual
7084 register instantiation to simply modify the constant rather than
7085 creating another one around this addition. */
971ba038 7086 if (code == PLUS && CONST_INT_P (op2)
10f307d9 7087 && GET_CODE (XEXP (value, 0)) == PLUS
8ad4c111 7088 && REG_P (XEXP (XEXP (value, 0), 0))
10f307d9 7089 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7090 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7091 {
fef8467d 7092 rtx temp = expand_simple_binop (GET_MODE (value), code,
7093 XEXP (XEXP (value, 0), 0), op2,
7094 subtarget, 0, OPTAB_LIB_WIDEN);
7095 return expand_simple_binop (GET_MODE (value), code, temp,
7096 force_operand (XEXP (XEXP (value,
7097 0), 1), 0),
7098 target, 0, OPTAB_LIB_WIDEN);
10f307d9 7099 }
fa56dc1d 7100
fef8467d 7101 op1 = force_operand (XEXP (value, 0), subtarget);
7102 op2 = force_operand (op2, NULL_RTX);
7103 switch (code)
7104 {
7105 case MULT:
7106 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7107 case DIV:
7108 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7109 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7110 target, 1, OPTAB_LIB_WIDEN);
7111 else
7112 return expand_divmod (0,
7113 FLOAT_MODE_P (GET_MODE (value))
7114 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7115 GET_MODE (value), op1, op2, target, 0);
fef8467d 7116 case MOD:
7117 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7118 target, 0);
fef8467d 7119 case UDIV:
7120 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7121 target, 1);
fef8467d 7122 case UMOD:
7123 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7124 target, 1);
fef8467d 7125 case ASHIFTRT:
7126 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7127 target, 0, OPTAB_LIB_WIDEN);
fef8467d 7128 default:
7129 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7130 target, 1, OPTAB_LIB_WIDEN);
7131 }
7132 }
6720e96c 7133 if (UNARY_P (value))
fef8467d 7134 {
c0427b5d 7135 if (!target)
7136 target = gen_reg_rtx (GET_MODE (value));
fef8467d 7137 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6184cd69 7138 switch (code)
7139 {
c0427b5d 7140 case ZERO_EXTEND:
7141 case SIGN_EXTEND:
6184cd69 7142 case TRUNCATE:
e40df2f5 7143 case FLOAT_EXTEND:
7144 case FLOAT_TRUNCATE:
c0427b5d 7145 convert_move (target, op1, code == ZERO_EXTEND);
7146 return target;
7147
7148 case FIX:
7149 case UNSIGNED_FIX:
7150 expand_fix (target, op1, code == UNSIGNED_FIX);
7151 return target;
7152
7153 case FLOAT:
7154 case UNSIGNED_FLOAT:
7155 expand_float (target, op1, code == UNSIGNED_FLOAT);
7156 return target;
7157
6184cd69 7158 default:
7159 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7160 }
10f307d9 7161 }
3084721c 7162
7163#ifdef INSN_SCHEDULING
 7164  /* On machines that have insn scheduling, we want all memory references to be
7165 explicit, so we need to deal with such paradoxical SUBREGs. */
b537bfdb 7166 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
3084721c 7167 value
7168 = simplify_gen_subreg (GET_MODE (value),
7169 force_reg (GET_MODE (SUBREG_REG (value)),
7170 force_operand (SUBREG_REG (value),
7171 NULL_RTX)),
7172 GET_MODE (SUBREG_REG (value)),
7173 SUBREG_BYTE (value));
7174#endif
7175
10f307d9 7176 return value;
7177}
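/* Usage sketch (an addition; BASE is a hypothetical address rtx):

     rtx addr = gen_rtx_PLUS (Pmode, base, GEN_INT (12));
     addr = force_operand (addr, NULL_RTX);

   emits the addition and hands back a pseudo-register holding the sum
   (simple inputs such as a lone REG are returned unchanged).  */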
7178\f
10f307d9 7179/* Subroutine of expand_expr: return nonzero iff there is no way that
997d68fe 7180 EXP can reference X, which is being modified. TOP_P is nonzero if this
7181 call is going to be used to determine whether we need a temporary
67e40adc 7182 for EXP, as opposed to a recursive call to this function.
7183
7184 It is always safe for this routine to return zero since it merely
7185 searches for optimization opportunities. */
10f307d9 7186
e41f0d80 7187int
1f1872fd 7188safe_from_p (const_rtx x, tree exp, int top_p)
10f307d9 7189{
7190 rtx exp_rtl = 0;
7191 int i, nops;
7192
a71ba0b1 7193 if (x == 0
7194 /* If EXP has varying size, we MUST use a target since we currently
62d8c952 7195 have no way of allocating temporaries of variable size
7196 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7197 So we assume here that something at a higher level has prevented a
b9438b95 7198 clash. This is somewhat bogus, but the best we can do. Only
997d68fe 7199 do this when X is BLKmode and when we are at the top level. */
4b72716d 7200 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
b9438b95 7201 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
62d8c952 7202 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7203 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7204 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7205 != INTEGER_CST)
387bc205 7206 && GET_MODE (x) == BLKmode)
7207 /* If X is in the outgoing argument area, it is always safe. */
e16ceb8e 7208 || (MEM_P (x)
387bc205 7209 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7210 || (GET_CODE (XEXP (x, 0)) == PLUS
7211 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
10f307d9 7212 return 1;
7213
7214 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7215 find the underlying pseudo. */
7216 if (GET_CODE (x) == SUBREG)
7217 {
7218 x = SUBREG_REG (x);
8ad4c111 7219 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
10f307d9 7220 return 0;
7221 }
7222
387bc205 7223 /* Now look at our tree code and possibly recurse. */
10f307d9 7224 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7225 {
ce45a448 7226 case tcc_declaration:
6db2b7ab 7227 exp_rtl = DECL_RTL_IF_SET (exp);
10f307d9 7228 break;
7229
ce45a448 7230 case tcc_constant:
10f307d9 7231 return 1;
7232
ce45a448 7233 case tcc_exceptional:
10f307d9 7234 if (TREE_CODE (exp) == TREE_LIST)
56c7ac50 7235 {
7236 while (1)
7237 {
7238 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7239 return 0;
7240 exp = TREE_CHAIN (exp);
7241 if (!exp)
7242 return 1;
7243 if (TREE_CODE (exp) != TREE_LIST)
7244 return safe_from_p (x, exp, 0);
7245 }
7246 }
a5b684d2 7247 else if (TREE_CODE (exp) == CONSTRUCTOR)
7248 {
7249 constructor_elt *ce;
7250 unsigned HOST_WIDE_INT idx;
7251
f1f41a6c 7252 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
a5b684d2 7253 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7254 || !safe_from_p (x, ce->value, 0))
7255 return 0;
7256 return 1;
7257 }
67e40adc 7258 else if (TREE_CODE (exp) == ERROR_MARK)
7259 return 1; /* An already-visited SAVE_EXPR? */
10f307d9 7260 else
7261 return 0;
7262
ce45a448 7263 case tcc_statement:
7dd37241 7264 /* The only case we look at here is the DECL_INITIAL inside a
7265 DECL_EXPR. */
7266 return (TREE_CODE (exp) != DECL_EXPR
7267 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7268 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7269 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7270
ce45a448 7271 case tcc_binary:
7272 case tcc_comparison:
56c7ac50 7273 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7274 return 0;
d632b59a 7275 /* Fall through. */
56c7ac50 7276
ce45a448 7277 case tcc_unary:
56c7ac50 7278 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 7279
ce45a448 7280 case tcc_expression:
7281 case tcc_reference:
c2f47e15 7282 case tcc_vl_exp:
10f307d9 7283 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7284 the expression. If it is set, we conflict iff we are that rtx or
7285 both are in memory. Otherwise, we check all operands of the
7286 expression recursively. */
7287
7288 switch (TREE_CODE (exp))
7289 {
7290 case ADDR_EXPR:
86ce88aa 7291 /* If the operand is static or we are static, we can't conflict.
7292 Likewise if we don't conflict with the operand at all. */
7293 if (staticp (TREE_OPERAND (exp, 0))
7294 || TREE_STATIC (exp)
7295 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7296 return 1;
7297
7298 /* Otherwise, the only way this can conflict is if we are taking
 7299	 the address of a DECL and that address is part of X, which is
7300 very rare. */
7301 exp = TREE_OPERAND (exp, 0);
7302 if (DECL_P (exp))
7303 {
7304 if (!DECL_RTL_SET_P (exp)
e16ceb8e 7305 || !MEM_P (DECL_RTL (exp)))
86ce88aa 7306 return 0;
7307 else
7308 exp_rtl = XEXP (DECL_RTL (exp), 0);
7309 }
7310 break;
10f307d9 7311
5d9de213 7312 case MEM_REF:
e16ceb8e 7313 if (MEM_P (x)
387bc205 7314 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7315 get_alias_set (exp)))
10f307d9 7316 return 0;
7317 break;
7318
7319 case CALL_EXPR:
bc33ff05 7320 /* Assume that the call will clobber all hard registers and
7321 all of memory. */
8ad4c111 7322 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
e16ceb8e 7323 || MEM_P (x))
bc33ff05 7324 return 0;
10f307d9 7325 break;
7326
10f307d9 7327 case WITH_CLEANUP_EXPR:
34e2ddcd 7328 case CLEANUP_POINT_EXPR:
6388f9f7 7329 /* Lowered by gimplify.c. */
611234b4 7330 gcc_unreachable ();
6388f9f7 7331
10f307d9 7332 case SAVE_EXPR:
67c155cb 7333 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 7334
0dbd1c74 7335 default:
7336 break;
10f307d9 7337 }
7338
7339 /* If we have an rtx, we do not need to scan our operands. */
7340 if (exp_rtl)
7341 break;
7342
c2f47e15 7343 nops = TREE_OPERAND_LENGTH (exp);
10f307d9 7344 for (i = 0; i < nops; i++)
7345 if (TREE_OPERAND (exp, i) != 0
997d68fe 7346 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
10f307d9 7347 return 0;
e41f0d80 7348
ce45a448 7349 break;
7350
7351 case tcc_type:
7352 /* Should never get a type here. */
7353 gcc_unreachable ();
10f307d9 7354 }
7355
7356 /* If we have an rtl, find any enclosed object. Then see if we conflict
7357 with it. */
7358 if (exp_rtl)
7359 {
7360 if (GET_CODE (exp_rtl) == SUBREG)
7361 {
7362 exp_rtl = SUBREG_REG (exp_rtl);
8ad4c111 7363 if (REG_P (exp_rtl)
10f307d9 7364 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7365 return 0;
7366 }
7367
7368 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
387bc205 7369 are memory and they conflict. */
10f307d9 7370 return ! (rtx_equal_p (x, exp_rtl)
e16ceb8e 7371 || (MEM_P (x) && MEM_P (exp_rtl)
376a287d 7372 && true_dependence (exp_rtl, VOIDmode, x)));
10f307d9 7373 }
7374
7375 /* If we reach here, it is safe. */
7376 return 1;
7377}
7378
155b05dc 7379\f
fcdc122e 7380/* Return the highest power of two that EXP is known to be a multiple of.
7381 This is used in updating alignment of MEMs in array references. */
7382
516849c7 7383unsigned HOST_WIDE_INT
b7bf20db 7384highest_pow2_factor (const_tree exp)
fcdc122e 7385{
c8a2b4ff 7386 unsigned HOST_WIDE_INT ret;
7387 int trailing_zeros = tree_ctz (exp);
7388 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7389 return BIGGEST_ALIGNMENT;
7390 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7391 if (ret > BIGGEST_ALIGNMENT)
7392 return BIGGEST_ALIGNMENT;
7393 return ret;
fcdc122e 7394}
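/* Added worked example: for EXP == i * 12, tree_ctz knows of two
   trailing zero bits (12 == 0b1100), so the result is 1 << 2 == 4.
   Expressions with more known trailing zeros than fit in a host wide
   int, or whose power of two would exceed BIGGEST_ALIGNMENT, are
   capped at BIGGEST_ALIGNMENT.  */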
5b965633 7395
252d0e4d 7396/* Similar, except that the alignment requirements of TARGET are
7397 taken into account. Assume it is at least as aligned as its
7398 type, unless it is a COMPONENT_REF in which case the layout of
7399 the structure gives the alignment. */
5b965633 7400
84130727 7401static unsigned HOST_WIDE_INT
b7bf20db 7402highest_pow2_factor_for_target (const_tree target, const_tree exp)
5b965633 7403{
6cbeacbb 7404 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7405 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
48e1416a 7406
6cbeacbb 7407 return MAX (factor, talign);
5b965633 7408}
fcdc122e 7409\f
9d75589a 7410/* Convert the tree comparison code TCODE to the rtl one where the
c909ed33 7411 signedness is UNSIGNEDP. */
7412
7413static enum rtx_code
7414convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7415{
7416 enum rtx_code code;
7417 switch (tcode)
7418 {
7419 case EQ_EXPR:
7420 code = EQ;
7421 break;
7422 case NE_EXPR:
7423 code = NE;
7424 break;
7425 case LT_EXPR:
7426 code = unsignedp ? LTU : LT;
7427 break;
7428 case LE_EXPR:
7429 code = unsignedp ? LEU : LE;
7430 break;
7431 case GT_EXPR:
7432 code = unsignedp ? GTU : GT;
7433 break;
7434 case GE_EXPR:
7435 code = unsignedp ? GEU : GE;
7436 break;
7437 case UNORDERED_EXPR:
7438 code = UNORDERED;
7439 break;
7440 case ORDERED_EXPR:
7441 code = ORDERED;
7442 break;
7443 case UNLT_EXPR:
7444 code = UNLT;
7445 break;
7446 case UNLE_EXPR:
7447 code = UNLE;
7448 break;
7449 case UNGT_EXPR:
7450 code = UNGT;
7451 break;
7452 case UNGE_EXPR:
7453 code = UNGE;
7454 break;
7455 case UNEQ_EXPR:
7456 code = UNEQ;
7457 break;
7458 case LTGT_EXPR:
7459 code = LTGT;
7460 break;
7461
7462 default:
7463 gcc_unreachable ();
7464 }
7465 return code;
7466}
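/* Added example: LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to
   LT otherwise, while the unordered codes such as UNLT_EXPR map the
   same way regardless of signedness.  */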
7467
33204670 7468/* Subroutine of expand_expr. Expand the two operands of a binary
7469 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7470 The value may be stored in TARGET if TARGET is nonzero. The
7471 MODIFIER argument is as documented by expand_expr. */
7472
01ee997b 7473void
33204670 7474expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7475 enum expand_modifier modifier)
7476{
7477 if (! safe_from_p (target, exp1, 1))
7478 target = 0;
7479 if (operand_equal_p (exp0, exp1, 0))
7480 {
7481 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7482 *op1 = copy_rtx (*op0);
7483 }
7484 else
7485 {
3541e113 7486 /* If we need to preserve evaluation order, copy exp0 into its own
7487 temporary variable so that it can't be clobbered by exp1. */
7488 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7489 exp0 = save_expr (exp0);
33204670 7490 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7491 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7492 }
7493}
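/* Usage sketch (an addition; treeop0, treeop1 and subtarget are
   hypothetical locals of a binary-operation expander):

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
		      EXPAND_NORMAL);

   Operands that compare operand_equal_p are expanded once and copied,
   and the target is dropped when it is not safe_from_p the second
   operand.  */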
7494
c3a9c149 7495\f
334ec2d8 7496/* Return a MEM that contains constant EXP. DEFER is as for
f2d0e9f1 7497 output_constant_def and MODIFIER is as for expand_expr. */
7498
7499static rtx
7500expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7501{
7502 rtx mem;
7503
7504 mem = output_constant_def (exp, defer);
7505 if (modifier != EXPAND_INITIALIZER)
7506 mem = use_anchored_address (mem);
7507 return mem;
7508}
7509
b51e4016 7510/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
ec1e52d1 7511 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7512
7513static rtx
3754d046 7514expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
98155838 7515 enum expand_modifier modifier, addr_space_t as)
ec1e52d1 7516{
7517 rtx result, subtarget;
7518 tree inner, offset;
7519 HOST_WIDE_INT bitsize, bitpos;
7520 int volatilep, unsignedp;
3754d046 7521 machine_mode mode1;
ec1e52d1 7522
7523 /* If we are taking the address of a constant and are at the top level,
7524 we have to use output_constant_def since we can't call force_const_mem
7525 at top level. */
7526 /* ??? This should be considered a front-end bug. We should not be
7527 generating ADDR_EXPR of something that isn't an LVALUE. The only
7528 exception here is STRING_CST. */
e54c9818 7529 if (CONSTANT_CLASS_P (exp))
792729b8 7530 {
7531 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7532 if (modifier < EXPAND_SUM)
7533 result = force_operand (result, target);
7534 return result;
7535 }
ec1e52d1 7536
7537 /* Everything must be something allowed by is_gimple_addressable. */
7538 switch (TREE_CODE (exp))
7539 {
7540 case INDIRECT_REF:
7541 /* This case will happen via recursion for &a->b. */
f2d0e9f1 7542 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
ec1e52d1 7543
182cf5a9 7544 case MEM_REF:
7545 {
7546 tree tem = TREE_OPERAND (exp, 0);
7547 if (!integer_zerop (TREE_OPERAND (exp, 1)))
a0553bff 7548 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
182cf5a9 7549 return expand_expr (tem, target, tmode, modifier);
7550 }
7551
ec1e52d1 7552 case CONST_DECL:
c5075621 7553 /* Expand the initializer like constants above. */
792729b8 7554 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7555 0, modifier), 0);
7556 if (modifier < EXPAND_SUM)
7557 result = force_operand (result, target);
7558 return result;
ec1e52d1 7559
7560 case REALPART_EXPR:
7561 /* The real part of the complex number is always first, therefore
7562 the address is the same as the address of the parent object. */
7563 offset = 0;
7564 bitpos = 0;
7565 inner = TREE_OPERAND (exp, 0);
7566 break;
7567
7568 case IMAGPART_EXPR:
7569 /* The imaginary part of the complex number is always second.
91275768 7570 The expression is therefore always offset by the size of the
ec1e52d1 7571 scalar type. */
7572 offset = 0;
7573 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7574 inner = TREE_OPERAND (exp, 0);
7575 break;
7576
03404fe6 7577 case COMPOUND_LITERAL_EXPR:
50f9371e 7578 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7579 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7580 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7581 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7582 the initializers aren't gimplified. */
7583 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7584 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
03404fe6 7585 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7586 target, tmode, modifier, as);
7587 /* FALLTHRU */
ec1e52d1 7588 default:
7589 /* If the object is a DECL, then expand it for its rtl. Don't bypass
 7590	 expand_expr, as that can have various side effects; LABEL_DECLs, for
e54c9818 7591	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7592 CONSTRUCTORs too, which should yield a memory reference for the
7593 constructor's contents. Assume language specific tree nodes can
7594 be expanded in some interesting way. */
862f468c 7595 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
ec1e52d1 7596 if (DECL_P (exp)
e54c9818 7597 || TREE_CODE (exp) == CONSTRUCTOR
862f468c 7598 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
ec1e52d1 7599 {
7600 result = expand_expr (exp, target, tmode,
7601 modifier == EXPAND_INITIALIZER
7602 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7603
7604 /* If the DECL isn't in memory, then the DECL wasn't properly
7605 marked TREE_ADDRESSABLE, which will be either a front-end
7606 or a tree optimizer bug. */
5b9418fd 7607
7608 if (TREE_ADDRESSABLE (exp)
7609 && ! MEM_P (result)
9af5ce0c 7610 && ! targetm.calls.allocate_stack_slots_for_args ())
5b9418fd 7611 {
7612 error ("local frame unavailable (naked function?)");
7613 return result;
7614 }
7615 else
7616 gcc_assert (MEM_P (result));
ec1e52d1 7617 result = XEXP (result, 0);
7618
7619 /* ??? Is this needed anymore? */
ea259bbe 7620 if (DECL_P (exp))
7621 TREE_USED (exp) = 1;
ec1e52d1 7622
7623 if (modifier != EXPAND_INITIALIZER
41628de0 7624 && modifier != EXPAND_CONST_ADDRESS
7625 && modifier != EXPAND_SUM)
ec1e52d1 7626 result = force_operand (result, target);
7627 return result;
7628 }
7629
e7e9416e 7630 /* Pass FALSE as the last argument to get_inner_reference although
7631 we are expanding to RTL. The rationale is that we know how to
7632 handle "aligning nodes" here: we can just bypass them because
7633 they won't change the final object whose address will be returned
7634 (they actually exist only for that purpose). */
ec1e52d1 7635 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
e7e9416e 7636 &mode1, &unsignedp, &volatilep, false);
ec1e52d1 7637 break;
7638 }
7639
7640 /* We must have made progress. */
611234b4 7641 gcc_assert (inner != exp);
ec1e52d1 7642
7643 subtarget = offset || bitpos ? NULL_RTX : target;
41727a57 7644 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7645 inner alignment, force the inner to be sufficiently aligned. */
7646 if (CONSTANT_CLASS_P (inner)
7647 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7648 {
7649 inner = copy_node (inner);
7650 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7651 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7652 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7653 }
98155838 7654 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
ec1e52d1 7655
ec1e52d1 7656 if (offset)
7657 {
7658 rtx tmp;
7659
7660 if (modifier != EXPAND_NORMAL)
7661 result = force_operand (result, NULL);
48e1416a 7662 tmp = expand_expr (offset, NULL_RTX, tmode,
af391a06 7663 modifier == EXPAND_INITIALIZER
7664 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
ec1e52d1 7665
1c634092 7666 /* expand_expr is allowed to return an object in a mode other
7667 than TMODE. If it did, we need to convert. */
7668 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7669 tmp = convert_modes (tmode, GET_MODE (tmp),
7670 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
98155838 7671 result = convert_memory_address_addr_space (tmode, result, as);
7672 tmp = convert_memory_address_addr_space (tmode, tmp, as);
07f6ff58 7673
3286ab0c 7674 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88ff2f0d 7675 result = simplify_gen_binary (PLUS, tmode, result, tmp);
ec1e52d1 7676 else
7677 {
7678 subtarget = bitpos ? NULL_RTX : target;
7679 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7680 1, OPTAB_LIB_WIDEN);
7681 }
7682 }
7683
7684 if (bitpos)
7685 {
7686 /* Someone beforehand should have rejected taking the address
7687 of such an object. */
07f6ff58 7688 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
ec1e52d1 7689
fe265396 7690 result = convert_memory_address_addr_space (tmode, result, as);
29c05e22 7691 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
ec1e52d1 7692 if (modifier < EXPAND_SUM)
7693 result = force_operand (result, target);
7694 }
7695
7696 return result;
7697}
7698
b51e4016 7699/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7700 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7701
7702static rtx
3754d046 7703expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
b51e4016 7704 enum expand_modifier modifier)
7705{
98155838 7706 addr_space_t as = ADDR_SPACE_GENERIC;
3754d046 7707 machine_mode address_mode = Pmode;
7708 machine_mode pointer_mode = ptr_mode;
7709 machine_mode rmode;
b51e4016 7710 rtx result;
7711
07f6ff58 7712 /* Target mode of VOIDmode says "whatever's natural". */
7713 if (tmode == VOIDmode)
7714 tmode = TYPE_MODE (TREE_TYPE (exp));
7715
98155838 7716 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7717 {
7718 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7719 address_mode = targetm.addr_space.address_mode (as);
7720 pointer_mode = targetm.addr_space.pointer_mode (as);
7721 }
7722
07f6ff58 7723 /* We can get called with some Weird Things if the user does silliness
7724 like "(short) &a". In that case, convert_memory_address won't do
7725 the right thing, so ignore the given target mode. */
98155838 7726 if (tmode != address_mode && tmode != pointer_mode)
7727 tmode = address_mode;
07f6ff58 7728
b51e4016 7729 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
98155838 7730 tmode, modifier, as);
b51e4016 7731
7732 /* Despite expand_expr claims concerning ignoring TMODE when not
07f6ff58 7733 strictly convenient, stuff breaks if we don't honor it. Note
7734 that combined with the above, we only do this for pointer modes. */
b51e4016 7735 rmode = GET_MODE (result);
7736 if (rmode == VOIDmode)
7737 rmode = tmode;
7738 if (rmode != tmode)
98155838 7739 result = convert_memory_address_addr_space (tmode, result, as);
07f6ff58 7740
b51e4016 7741 return result;
7742}
7743
d4cf8ff7 7744/* Generate code for computing CONSTRUCTOR EXP.
7745 An rtx for the computed value is returned. If AVOID_TEMP_MEM
 7746   is TRUE, instead of creating a temporary variable in memory,
 7747   NULL is returned and the caller needs to handle it differently. */
7748
7749static rtx
7750expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7751 bool avoid_temp_mem)
7752{
7753 tree type = TREE_TYPE (exp);
3754d046 7754 machine_mode mode = TYPE_MODE (type);
d4cf8ff7 7755
7756 /* Try to avoid creating a temporary at all. This is possible
7757 if all of the initializer is zero.
7758 FIXME: try to handle all [0..255] initializers we can handle
7759 with memset. */
7760 if (TREE_STATIC (exp)
7761 && !TREE_ADDRESSABLE (exp)
7762 && target != 0 && mode == BLKmode
7763 && all_zeros_p (exp))
7764 {
7765 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7766 return target;
7767 }
7768
7769 /* All elts simple constants => refer to a constant in memory. But
7770 if this is a non-BLKmode mode, let it store a field at a time
e913b5cd 7771 since that should make a CONST_INT, CONST_WIDE_INT or
7772 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7773 use, it is best to store directly into the target unless the type
7774 is large enough that memcpy will be used. If we are making an
7775 initializer and all operands are constant, put it in memory as
7776 well.
d4cf8ff7 7777
7778 FIXME: Avoid trying to fill vector constructors piece-meal.
7779 Output them with output_constant_def below unless we're sure
7780 they're zeros. This should go away when vector initializers
7781 are treated like VECTOR_CST instead of arrays. */
7782 if ((TREE_STATIC (exp)
7783 && ((mode == BLKmode
7784 && ! (target != 0 && safe_from_p (target, exp, 1)))
7785 || TREE_ADDRESSABLE (exp)
e913b5cd 7786 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
d4bd0e64 7787 && (! can_move_by_pieces
e913b5cd 7788 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
d4cf8ff7 7789 TYPE_ALIGN (type)))
7790 && ! mostly_zeros_p (exp))))
7791 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7792 && TREE_CONSTANT (exp)))
7793 {
7794 rtx constructor;
7795
7796 if (avoid_temp_mem)
7797 return NULL_RTX;
7798
7799 constructor = expand_expr_constant (exp, 1, modifier);
7800
7801 if (modifier != EXPAND_CONST_ADDRESS
7802 && modifier != EXPAND_INITIALIZER
7803 && modifier != EXPAND_SUM)
7804 constructor = validize_mem (constructor);
7805
7806 return constructor;
7807 }
7808
7809 /* Handle calls that pass values in multiple non-contiguous
7810 locations. The Irix 6 ABI has examples of this. */
7811 if (target == 0 || ! safe_from_p (target, exp, 1)
7812 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7813 {
7814 if (avoid_temp_mem)
7815 return NULL_RTX;
7816
9f495e8d 7817 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
d4cf8ff7 7818 }
7819
7820 store_constructor (exp, target, 0, int_expr_size (exp));
7821 return target;
7822}
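/* Added example: an all-zero initializer whose CONSTRUCTOR is constant
   (TREE_STATIC), e.g. "struct S s = { 0 };" expanded with a BLKmode
   target, takes the first path above and becomes a single clear_storage
   call rather than a sequence of per-field stores.  */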
7823
b51e4016 7824
10f307d9 7825/* expand_expr: generate code for computing expression EXP.
7826 An rtx for the computed value is returned. The value is never null.
7827 In the case of a void EXP, const0_rtx is returned.
7828
7829 The value may be stored in TARGET if TARGET is nonzero.
7830 TARGET is just a suggestion; callers must assume that
7831 the rtx returned may not be the same as TARGET.
7832
7833 If TARGET is CONST0_RTX, it means that the value will be ignored.
7834
7835 If TMODE is not VOIDmode, it suggests generating the
7836 result in mode TMODE. But this is done only when convenient.
 7837   Otherwise, TMODE is ignored and the value is generated in its natural mode.
7838 TMODE is just a suggestion; callers must assume that
7839 the rtx returned may not have mode TMODE.
7840
d2ae1b1e 7841 Note that TARGET may have neither TMODE nor MODE. In that case, it
7842 probably will not be used.
10f307d9 7843
7844 If MODIFIER is EXPAND_SUM then when EXP is an addition
7845 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7846 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7847 products as above, or REG or MEM, or constant.
7848 Ordinarily in such cases we would output mul or add instructions
7849 and then return a pseudo reg containing the sum.
7850
7851 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7852 it also marks a label as absolutely required (it can't be dead).
1aaabd2e 7853 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d2ae1b1e 7854 This is used for outputting expressions used in initializers.
7855
7856 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7857 with a constant address even if that address is not normally legitimate.
a35a63ff 7858 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7859
7860 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7861 a call parameter. Such targets require special care as we haven't yet
7862 marked TARGET so that it's safe from being trashed by libcalls. We
7863 don't want to use TARGET for anything but the final result;
7864 Intermediate values must go elsewhere. Additionally, calls to
491e04ef 7865 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
60ffaf4d 7866
7867 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7868 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7869 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7870 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
a12f023f 7871 recursively.
7872
7873 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7874 In this case, we don't adjust a returned MEM rtx that wouldn't be
7875 sufficiently aligned for its mode; instead, it's up to the caller
7876 to deal with it afterwards. This is used to make sure that unaligned
7877 base objects for which out-of-bounds accesses are supported, for
7878 example record types with trailing arrays, aren't realigned behind
7879 the back of the caller.
7880 The normal operating mode is to pass FALSE for this parameter. */
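
/* Illustrative sketch (editorial addition, not from the GCC sources):
   under EXPAND_SUM, expanding something like "p + i * 4" may come back
   as the un-forced address arithmetic

     (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   whereas EXPAND_NORMAL would emit the mult/add insns and hand back a
   single pseudo holding the sum.  Register numbers and SImode are
   illustrative only.  */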
10f307d9 7881
7882rtx
3754d046 7883expand_expr_real (tree exp, rtx target, machine_mode tmode,
a12f023f 7884 enum expand_modifier modifier, rtx *alt_rtl,
7885 bool inner_reference_p)
4ee9c684 7886{
8cee8dc0 7887 rtx ret;
4ee9c684 7888
7889 /* Handle ERROR_MARK before anybody tries to access its type. */
7890 if (TREE_CODE (exp) == ERROR_MARK
75a70cf9 7891 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
4ee9c684 7892 {
7893 ret = CONST0_RTX (tmode);
7894 return ret ? ret : const0_rtx;
7895 }
7896
a12f023f 7897 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7898 inner_reference_p);
4ee9c684 7899 return ret;
7900}
7901
c909ed33 7902/* Try to expand the conditional expression which is represented by
 7903   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
 7904   return the rtl reg which represents the result.  Otherwise return
 7905   NULL_RTX.  */
7906
7907static rtx
7908expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7909 tree treeop1 ATTRIBUTE_UNUSED,
7910 tree treeop2 ATTRIBUTE_UNUSED)
7911{
c909ed33 7912 rtx insn;
7913 rtx op00, op01, op1, op2;
7914 enum rtx_code comparison_code;
3754d046 7915 machine_mode comparison_mode;
c909ed33 7916 gimple srcstmt;
7917 rtx temp;
7918 tree type = TREE_TYPE (treeop1);
7919 int unsignedp = TYPE_UNSIGNED (type);
3754d046 7920 machine_mode mode = TYPE_MODE (type);
7921 machine_mode orig_mode = mode;
c909ed33 7922
c909ed33 7923 /* If we cannot do a conditional move on the mode, try doing it
7924 with the promoted mode. */
7925 if (!can_conditionally_move_p (mode))
631188f5 7926 {
7927 mode = promote_mode (type, mode, &unsignedp);
7928 if (!can_conditionally_move_p (mode))
7929 return NULL_RTX;
7930 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7931 }
7932 else
7933 temp = assign_temp (type, 0, 1);
c909ed33 7934
7935 start_sequence ();
7936 expand_operands (treeop1, treeop2,
7937 temp, &op1, &op2, EXPAND_NORMAL);
7938
7939 if (TREE_CODE (treeop0) == SSA_NAME
7940 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7941 {
7942 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7943 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7944 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7945 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7946 comparison_mode = TYPE_MODE (type);
7947 unsignedp = TYPE_UNSIGNED (type);
7948 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7949 }
21c8a0ab 7950 else if (COMPARISON_CLASS_P (treeop0))
c909ed33 7951 {
7952 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7953 enum tree_code cmpcode = TREE_CODE (treeop0);
7954 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7955 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7956 unsignedp = TYPE_UNSIGNED (type);
7957 comparison_mode = TYPE_MODE (type);
7958 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7959 }
7960 else
7961 {
7962 op00 = expand_normal (treeop0);
7963 op01 = const0_rtx;
7964 comparison_code = NE;
9336ad57 7965 comparison_mode = GET_MODE (op00);
7966 if (comparison_mode == VOIDmode)
7967 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
c909ed33 7968 }
7969
7970 if (GET_MODE (op1) != mode)
7971 op1 = gen_lowpart (mode, op1);
7972
7973 if (GET_MODE (op2) != mode)
7974 op2 = gen_lowpart (mode, op2);
7975
7976 /* Try to emit the conditional move. */
7977 insn = emit_conditional_move (temp, comparison_code,
7978 op00, op01, comparison_mode,
7979 op1, op2, mode,
7980 unsignedp);
7981
7982 /* If we could do the conditional move, emit the sequence,
7983 and return. */
7984 if (insn)
7985 {
1d277a67 7986 rtx_insn *seq = get_insns ();
c909ed33 7987 end_sequence ();
7988 emit_insn (seq);
31c66114 7989 return convert_modes (orig_mode, mode, temp, 0);
c909ed33 7990 }
7991
7992 /* Otherwise discard the sequence and fall back to code with
7993 branches. */
7994 end_sequence ();
c909ed33 7995 return NULL_RTX;
7996}
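
/* Editorial sketch (not from the GCC sources) of what the function above
   arranges for GIMPLE like

     x_3 = a_1 < b_2 ? c_4 : d_5;

   it expands the comparison operands once and then asks
   emit_conditional_move for a single conditional-move insn, roughly

     temp = d_5;  if (a_1 < b_2) temp = c_4;

   falling back to the branching COND_EXPR code in expand_expr_real_2
   when no such insn can be emitted.  */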
7997
16c9337c 7998rtx
3754d046 7999expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
dff12ad7 8000 enum expand_modifier modifier)
10f307d9 8001{
dff12ad7 8002 rtx op0, op1, op2, temp;
f9a00e9e 8003 rtx_code_label *lab;
35cc02b5 8004 tree type;
78a8ed03 8005 int unsignedp;
3754d046 8006 machine_mode mode;
dff12ad7 8007 enum tree_code code = ops->code;
10f307d9 8008 optab this_optab;
32b3a273 8009 rtx subtarget, original_target;
8010 int ignore;
dcfc697f 8011 bool reduce_bit_field;
dff12ad7 8012 location_t loc = ops->location;
b9be572e 8013 tree treeop0, treeop1, treeop2;
dcfc697f 8014#define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
4f7f7efd 8015 ? reduce_to_bit_field_precision ((expr), \
8016 target, \
8017 type) \
8018 : (expr))
10f307d9 8019
dff12ad7 8020 type = ops->type;
75a70cf9 8021 mode = TYPE_MODE (type);
8022 unsignedp = TYPE_UNSIGNED (type);
78a8ed03 8023
dff12ad7 8024 treeop0 = ops->op0;
8025 treeop1 = ops->op1;
b9be572e 8026 treeop2 = ops->op2;
dff12ad7 8027
8028 /* We should be called only on simple (binary or unary) expressions,
8029 exactly those that are valid in gimple expressions that aren't
8030 GIMPLE_SINGLE_RHS (or invalid). */
8031 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
00f4f705 8032 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8033 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
588e1cc3 8034
32b3a273 8035 ignore = (target == const0_rtx
d9659041 8036 || ((CONVERT_EXPR_CODE_P (code)
6a2b2394 8037 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
32b3a273 8038 && TREE_CODE (type) == VOID_TYPE));
8039
dff12ad7 8040 /* We should be called only if we need the result. */
8041 gcc_assert (!ignore);
8042
dcfc697f 8043 /* An operation in what may be a bit-field type needs the
8044 result to be reduced to the precision of the bit-field type,
8045 which is narrower than that of the type's mode. */
c3c9a9f3 8046 reduce_bit_field = (INTEGRAL_TYPE_P (type)
dcfc697f 8047 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
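
  /* Worked example (editorial addition): for a bit-field type with
     TYPE_PRECISION == 3 whose TYPE_MODE is QImode (precision 8),
     reduce_bit_field is true; computing 7 + 1 in QImode yields 8, which
     REDUCE_BIT_FIELD must fold back to the 3-bit value 0 for an
     unsigned type.  */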
8048
dcfc697f 8049 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8050 target = 0;
8051
8052 /* Use subtarget as the target for operand 0 of a binary operation. */
8053 subtarget = get_subtarget (target);
8054 original_target = target;
10f307d9 8055
10f307d9 8056 switch (code)
8057 {
e38def9c 8058 case NON_LVALUE_EXPR:
dff12ad7 8059 case PAREN_EXPR:
8060 CASE_CONVERT:
8061 if (treeop0 == error_mark_node)
8062 return const0_rtx;
4ee9c684 8063
dff12ad7 8064 if (TREE_CODE (type) == UNION_TYPE)
8065 {
8066 tree valtype = TREE_TYPE (treeop0);
4ee9c684 8067
dff12ad7 8068 /* If both input and output are BLKmode, this conversion isn't doing
8069 anything except possibly changing memory attribute. */
8070 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8071 {
8072 rtx result = expand_expr (treeop0, target, tmode,
8073 modifier);
10f307d9 8074
dff12ad7 8075 result = copy_rtx (result);
8076 set_mem_attributes (result, type, 0);
8077 return result;
8078 }
dec41e98 8079
dff12ad7 8080 if (target == 0)
8081 {
8082 if (TYPE_MODE (type) != BLKmode)
8083 target = gen_reg_rtx (TYPE_MODE (type));
8084 else
0ab48139 8085 target = assign_temp (type, 1, 1);
dff12ad7 8086 }
eb4b06b6 8087
dff12ad7 8088 if (MEM_P (target))
8089 /* Store data into beginning of memory target. */
8090 store_expr (treeop0,
8091 adjust_address (target, TYPE_MODE (valtype), 0),
8092 modifier == EXPAND_STACK_PARM,
8093 false);
9dda1f80 8094
dff12ad7 8095 else
8096 {
8097 gcc_assert (REG_P (target));
d2ae1b1e 8098
dff12ad7 8099 /* Store this field into a union of the proper type. */
8100 store_field (target,
8101 MIN ((int_size_in_bytes (TREE_TYPE
8102 (treeop0))
8103 * BITS_PER_UNIT),
8104 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
f955ca51 8105 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
dff12ad7 8106 }
d2ae1b1e 8107
dff12ad7 8108 /* Return the entire union. */
8109 return target;
34f17b00 8110 }
8111
dff12ad7 8112 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8113 {
8114 op0 = expand_expr (treeop0, target, VOIDmode,
8115 modifier);
d2ae1b1e 8116
dff12ad7 8117 /* If the signedness of the conversion differs and OP0 is
8118 a promoted SUBREG, clear that indication since we now
8119 have to do the proper extension. */
8120 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8121 && GET_CODE (op0) == SUBREG)
8122 SUBREG_PROMOTED_VAR_P (op0) = 0;
d2ae1b1e 8123
dff12ad7 8124 return REDUCE_BIT_FIELD (op0);
60ffaf4d 8125 }
acfb31e5 8126
dff12ad7 8127 op0 = expand_expr (treeop0, NULL_RTX, mode,
8128 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8129 if (GET_MODE (op0) == mode)
8130 ;
8131
8132 /* If OP0 is a constant, just convert it into the proper mode. */
8133 else if (CONSTANT_P (op0))
6e6b4174 8134 {
dff12ad7 8135 tree inner_type = TREE_TYPE (treeop0);
3754d046 8136 machine_mode inner_mode = GET_MODE (op0);
faa7e9d5 8137
8138 if (inner_mode == VOIDmode)
8139 inner_mode = TYPE_MODE (inner_type);
6e6b4174 8140
dff12ad7 8141 if (modifier == EXPAND_INITIALIZER)
8142 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8143 subreg_lowpart_offset (mode,
8144 inner_mode));
8145 else
 8146	      op0 = convert_modes (mode, inner_mode, op0,
8147 TYPE_UNSIGNED (inner_type));
6e6b4174 8148 }
8149
dff12ad7 8150 else if (modifier == EXPAND_INITIALIZER)
8151 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
acfb31e5 8152
dff12ad7 8153 else if (target == 0)
8154 op0 = convert_to_mode (mode, op0,
8155 TYPE_UNSIGNED (TREE_TYPE
8156 (treeop0)));
8157 else
acfb31e5 8158 {
dff12ad7 8159 convert_move (target, op0,
8160 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8161 op0 = target;
8162 }
1f8b6002 8163
dff12ad7 8164 return REDUCE_BIT_FIELD (op0);
10f307d9 8165
bd1a81f7 8166 case ADDR_SPACE_CONVERT_EXPR:
8167 {
8168 tree treeop0_type = TREE_TYPE (treeop0);
8169 addr_space_t as_to;
8170 addr_space_t as_from;
8171
8172 gcc_assert (POINTER_TYPE_P (type));
8173 gcc_assert (POINTER_TYPE_P (treeop0_type));
8174
8175 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8176 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8177
8178 /* Conversions between pointers to the same address space should
8179 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8180 gcc_assert (as_to != as_from);
8181
8182 /* Ask target code to handle conversion between pointers
8183 to overlapping address spaces. */
8184 if (targetm.addr_space.subset_p (as_to, as_from)
8185 || targetm.addr_space.subset_p (as_from, as_to))
8186 {
8187 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8188 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8189 gcc_assert (op0);
8190 return op0;
8191 }
8192
8193 /* For disjoint address spaces, converting anything but
8194 a null pointer invokes undefined behaviour. We simply
8195 always return a null pointer here. */
8196 return CONST0_RTX (mode);
8197 }
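
    /* Editorial example (address-space names are illustrative, not from
       this file): on a target where a small memory space is a subset of
       the generic one, the targetm.addr_space.convert hook above emits
       the required extension or truncation of the pointer value; for two
       disjoint spaces only a null pointer converts meaningfully, which
       is why CONST0_RTX is returned unconditionally.  */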
8198
48e1416a 8199 case POINTER_PLUS_EXPR:
dff12ad7 8200      /* Even though the sizetype mode and the pointer's mode can be different,
48e1416a 8201 expand is able to handle this correctly and get the correct result out
dff12ad7 8202 of the PLUS_EXPR code. */
8203 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8204 if sizetype precision is smaller than pointer precision. */
8205 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8206 treeop1 = fold_convert_loc (loc, type,
8207 fold_convert_loc (loc, ssizetype,
8208 treeop1));
450c6e32 8209 /* If sizetype precision is larger than pointer precision, truncate the
8210 offset to have matching modes. */
8211 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8212 treeop1 = fold_convert_loc (loc, type, treeop1);
8213
dff12ad7 8214 case PLUS_EXPR:
dff12ad7 8215 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8216 something else, make sure we add the register to the constant and
8217 then to the other thing. This case can occur during strength
8218 reduction and doing it this way will produce better code if the
8219 frame pointer or argument pointer is eliminated.
10f307d9 8220
dff12ad7 8221 fold-const.c will ensure that the constant is always in the inner
8222 PLUS_EXPR, so the only case we need to do anything about is if
8223 sp, ap, or fp is our second argument, in which case we must swap
8224 the innermost first argument and our second argument. */
fa56dc1d 8225
dff12ad7 8226 if (TREE_CODE (treeop0) == PLUS_EXPR
8227 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8228 && TREE_CODE (treeop1) == VAR_DECL
8229 && (DECL_RTL (treeop1) == frame_pointer_rtx
8230 || DECL_RTL (treeop1) == stack_pointer_rtx
8231 || DECL_RTL (treeop1) == arg_pointer_rtx))
8232 {
c8010b80 8233 gcc_unreachable ();
dff12ad7 8234 }
10f307d9 8235
dff12ad7 8236 /* If the result is to be ptr_mode and we are adding an integer to
8237 something, we might be forming a constant. So try to use
8238 plus_constant. If it produces a sum and we can't accept it,
8239 use force_operand. This allows P = &ARR[const] to generate
8240 efficient code on machines where a SYMBOL_REF is not a valid
8241 address.
68a556d6 8242
dff12ad7 8243 If this is an EXPAND_SUM call, always return the sum. */
8244 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8245 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
67cae17d 8246 {
dff12ad7 8247 if (modifier == EXPAND_STACK_PARM)
8248 target = 0;
8249 if (TREE_CODE (treeop0) == INTEGER_CST
995b44f5 8250 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
dff12ad7 8251 && TREE_CONSTANT (treeop1))
8252 {
8253 rtx constant_part;
e913b5cd 8254 HOST_WIDE_INT wc;
3754d046 8255 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
67cae17d 8256
dff12ad7 8257 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8258 EXPAND_SUM);
796b6678 8259 /* Use wi::shwi to ensure that the constant is
dff12ad7 8260 truncated according to the mode of OP1, then sign extended
8261 to a HOST_WIDE_INT. Using the constant directly can result
8262 in non-canonical RTL in a 64x32 cross compile. */
f9ae6f95 8263 wc = TREE_INT_CST_LOW (treeop0);
796b6678 8264 constant_part =
8265 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
29c05e22 8266 op1 = plus_constant (mode, op1, INTVAL (constant_part));
dff12ad7 8267 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8268 op1 = force_operand (op1, target);
8269 return REDUCE_BIT_FIELD (op1);
8270 }
67cae17d 8271
dff12ad7 8272 else if (TREE_CODE (treeop1) == INTEGER_CST
995b44f5 8273 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
dff12ad7 8274 && TREE_CONSTANT (treeop0))
8275 {
8276 rtx constant_part;
e913b5cd 8277 HOST_WIDE_INT wc;
3754d046 8278 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
67cae17d 8279
dff12ad7 8280 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8281 (modifier == EXPAND_INITIALIZER
8282 ? EXPAND_INITIALIZER : EXPAND_SUM));
8283 if (! CONSTANT_P (op0))
8284 {
8285 op1 = expand_expr (treeop1, NULL_RTX,
8286 VOIDmode, modifier);
8287 /* Return a PLUS if modifier says it's OK. */
8288 if (modifier == EXPAND_SUM
8289 || modifier == EXPAND_INITIALIZER)
8290 return simplify_gen_binary (PLUS, mode, op0, op1);
8291 goto binop2;
8292 }
796b6678 8293 /* Use wi::shwi to ensure that the constant is
dff12ad7 8294 truncated according to the mode of OP1, then sign extended
8295 to a HOST_WIDE_INT. Using the constant directly can result
8296 in non-canonical RTL in a 64x32 cross compile. */
f9ae6f95 8297 wc = TREE_INT_CST_LOW (treeop1);
796b6678 8298 constant_part
8299 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
29c05e22 8300 op0 = plus_constant (mode, op0, INTVAL (constant_part));
dff12ad7 8301 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8302 op0 = force_operand (op0, target);
8303 return REDUCE_BIT_FIELD (op0);
8304 }
67cae17d 8305 }
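
      /* Editorial example of the plus_constant paths above: expanding
	 "&arr[10]" for a static 4-byte-element array with EXPAND_SUM can
	 yield (const (plus (symbol_ref "arr") (const_int 40))), which is
	 only forced into a register if the caller cannot use it as an
	 address directly.  The element size is illustrative.  */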
8306
46b155e1 8307 /* Use TER to expand pointer addition of a negated value
8308 as pointer subtraction. */
8309 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8310 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8311 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8312 && TREE_CODE (treeop1) == SSA_NAME
8313 && TYPE_MODE (TREE_TYPE (treeop0))
8314 == TYPE_MODE (TREE_TYPE (treeop1)))
8315 {
8316 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8317 if (def)
8318 {
8319 treeop1 = gimple_assign_rhs1 (def);
8320 code = MINUS_EXPR;
8321 goto do_minus;
8322 }
8323 }
8324
dff12ad7 8325 /* No sense saving up arithmetic to be done
8326 if it's all in the wrong mode to form part of an address.
8327 And force_operand won't know whether to sign-extend or
8328 zero-extend. */
8329 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 || mode != ptr_mode)
8331 {
8332 expand_operands (treeop0, treeop1,
8333 subtarget, &op0, &op1, EXPAND_NORMAL);
8334 if (op0 == const0_rtx)
8335 return op1;
8336 if (op1 == const0_rtx)
8337 return op0;
8338 goto binop2;
8339 }
67cae17d 8340
dff12ad7 8341 expand_operands (treeop0, treeop1,
8342 subtarget, &op0, &op1, modifier);
8343 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
10f307d9 8344
dff12ad7 8345 case MINUS_EXPR:
46b155e1 8346 do_minus:
dff12ad7 8347 /* For initializers, we are allowed to return a MINUS of two
8348 symbolic constants. Here we handle all cases when both operands
8349 are constant. */
8350 /* Handle difference of two symbolic constants,
8351 for the sake of an initializer. */
8352 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8353 && really_constant_p (treeop0)
8354 && really_constant_p (treeop1))
8355 {
8356 expand_operands (treeop0, treeop1,
8357 NULL_RTX, &op0, &op1, modifier);
acfb31e5 8358
dff12ad7 8359 /* If the last operand is a CONST_INT, use plus_constant of
8360 the negated constant. Else make the MINUS. */
8361 if (CONST_INT_P (op1))
29c05e22 8362 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8363 -INTVAL (op1)));
dff12ad7 8364 else
8365 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8366 }
acfb31e5 8367
dff12ad7 8368 /* No sense saving up arithmetic to be done
8369 if it's all in the wrong mode to form part of an address.
8370 And force_operand won't know whether to sign-extend or
8371 zero-extend. */
8372 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8373 || mode != ptr_mode)
8374 goto binop;
10f307d9 8375
dff12ad7 8376 expand_operands (treeop0, treeop1,
8377 subtarget, &op0, &op1, modifier);
c19f64ba 8378
dff12ad7 8379 /* Convert A - const to A + (-const). */
8380 if (CONST_INT_P (op1))
f75fb6ae 8381 {
dff12ad7 8382 op1 = negate_rtx (mode, op1);
8383 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
f75fb6ae 8384 }
603c4ee1 8385
dff12ad7 8386 goto binop2;
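
      /* Editorial note: canonical RTL has no (minus X (const_int N));
	 the conversion above turns e.g. "x - 5" into
	 (plus:SI (reg:SI 100) (const_int -5)), the form later passes
	 such as cse and combine expect.  SImode is illustrative.  */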
fa56dc1d 8387
00f4f705 8388 case WIDEN_MULT_PLUS_EXPR:
8389 case WIDEN_MULT_MINUS_EXPR:
8390 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
b9be572e 8391 op2 = expand_normal (treeop2);
00f4f705 8392 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8393 target, unsignedp);
8394 return target;
8395
62be004c 8396 case WIDEN_MULT_EXPR:
dff12ad7 8397 /* If first operand is constant, swap them.
8398 Thus the following special case checks need only
8399 check the second operand. */
8400 if (TREE_CODE (treeop0) == INTEGER_CST)
a4f59596 8401 std::swap (treeop0, treeop1);
10f307d9 8402
dff12ad7 8403 /* First, check if we have a multiplication of one signed and one
8404 unsigned operand. */
62be004c 8405 if (TREE_CODE (treeop1) != INTEGER_CST
8406 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8407 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
dff12ad7 8408 {
3754d046 8409 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
dff12ad7 8410 this_optab = usmul_widen_optab;
aff5fb4d 8411 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8412 != CODE_FOR_nothing)
dff12ad7 8413 {
aff5fb4d 8414 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8415 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8416 EXPAND_NORMAL);
8417 else
8418 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8419 EXPAND_NORMAL);
4620c2de 8420 /* op0 and op1 might still be constant, despite the above
8421 != INTEGER_CST check. Handle it. */
8422 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8423 {
8424 op0 = convert_modes (innermode, mode, op0, true);
8425 op1 = convert_modes (innermode, mode, op1, false);
8426 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8427 target, unsignedp));
8428 }
aff5fb4d 8429 goto binop3;
dff12ad7 8430 }
8431 }
62be004c 8432 /* Check for a multiplication with matching signedness. */
8433 else if ((TREE_CODE (treeop1) == INTEGER_CST
8434 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8435 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8436 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
dff12ad7 8437 {
62be004c 8438 tree op0type = TREE_TYPE (treeop0);
3754d046 8439 machine_mode innermode = TYPE_MODE (op0type);
dff12ad7 8440 bool zextend_p = TYPE_UNSIGNED (op0type);
8441 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8442 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
f1a494f4 8443
aff5fb4d 8444 if (TREE_CODE (treeop0) != INTEGER_CST)
dff12ad7 8445 {
aff5fb4d 8446 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
5a574e8b 8447 != CODE_FOR_nothing)
dff12ad7 8448 {
62be004c 8449 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8450 EXPAND_NORMAL);
4620c2de 8451 /* op0 and op1 might still be constant, despite the above
8452 != INTEGER_CST check. Handle it. */
8453 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8454 {
8455 widen_mult_const:
8456 op0 = convert_modes (innermode, mode, op0, zextend_p);
8457 op1
8458 = convert_modes (innermode, mode, op1,
8459 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8460 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8461 target,
8462 unsignedp));
8463 }
62be004c 8464 temp = expand_widening_mult (mode, op0, op1, target,
8465 unsignedp, this_optab);
8466 return REDUCE_BIT_FIELD (temp);
dff12ad7 8467 }
aff5fb4d 8468 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
5a574e8b 8469 != CODE_FOR_nothing
62be004c 8470 && innermode == word_mode)
dff12ad7 8471 {
8472 rtx htem, hipart;
62be004c 8473 op0 = expand_normal (treeop0);
8474 if (TREE_CODE (treeop1) == INTEGER_CST)
dff12ad7 8475 op1 = convert_modes (innermode, mode,
4620c2de 8476 expand_normal (treeop1),
8477 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
dff12ad7 8478 else
62be004c 8479 op1 = expand_normal (treeop1);
4620c2de 8480 /* op0 and op1 might still be constant, despite the above
8481 != INTEGER_CST check. Handle it. */
8482 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8483 goto widen_mult_const;
dff12ad7 8484 temp = expand_binop (mode, other_optab, op0, op1, target,
8485 unsignedp, OPTAB_LIB_WIDEN);
8486 hipart = gen_highpart (innermode, temp);
8487 htem = expand_mult_highpart_adjust (innermode, hipart,
8488 op0, op1, hipart,
8489 zextend_p);
8490 if (htem != hipart)
8491 emit_move_insn (hipart, htem);
8492 return REDUCE_BIT_FIELD (temp);
8493 }
8494 }
8495 }
62be004c 8496 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8497 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8498 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8499 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
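
      /* Editorial example: a WIDEN_MULT_EXPR taking two SImode values to
	 DImode maps to mulsidi3/umulsidi3-style patterns where the
	 target provides them; when only the other-signedness pattern
	 exists and the inner mode is word_mode, the code above
	 multiplies in the wide mode and repairs the high part with
	 expand_mult_highpart_adjust.  */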
8500
b9be572e 8501 case FMA_EXPR:
8502 {
8503 optab opt = fma_optab;
8504 gimple def0, def2;
8505
d325c8f4 8506 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8507 call. */
8508 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8509 {
8510 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8511 tree call_expr;
8512
8513 gcc_assert (fn != NULL_TREE);
8514 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8515 return expand_builtin (call_expr, target, subtarget, mode, false);
8516 }
8517
b9be572e 8518 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
ec4d2c3d 8519 /* The multiplication is commutative - look at its 2nd operand
8520 if the first isn't fed by a negate. */
8521 if (!def0)
8522 {
8523 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8524 /* Swap operands if the 2nd operand is fed by a negate. */
8525 if (def0)
a4f59596 8526 std::swap (treeop0, treeop1);
ec4d2c3d 8527 }
b9be572e 8528 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8529
8530 op0 = op2 = NULL;
8531
8532 if (def0 && def2
8533 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8534 {
8535 opt = fnms_optab;
8536 op0 = expand_normal (gimple_assign_rhs1 (def0));
8537 op2 = expand_normal (gimple_assign_rhs1 (def2));
8538 }
8539 else if (def0
8540 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8541 {
8542 opt = fnma_optab;
8543 op0 = expand_normal (gimple_assign_rhs1 (def0));
8544 }
8545 else if (def2
8546 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8547 {
8548 opt = fms_optab;
8549 op2 = expand_normal (gimple_assign_rhs1 (def2));
8550 }
8551
8552 if (op0 == NULL)
8553 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8554 if (op2 == NULL)
8555 op2 = expand_normal (treeop2);
8556 op1 = expand_normal (treeop1);
8557
8558 return expand_ternary_op (TYPE_MODE (type), opt,
8559 op0, op1, op2, target, 0);
8560 }
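
      /* Editorial sketch of the optab selection above, writing
	 FMA (a, b, c) for a * b + c:

	   -a * b + c  ->  fnma_optab  (negate feeding operand 0 or 1)
	    a * b - c  ->  fms_optab   (negate feeding operand 2)
	   -a * b - c  ->  fnms_optab  (both)

	 with a fallback to a __builtin_fma{,f,l} library call when the
	 target has no fma insn at all.  */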
8561
62be004c 8562 case MULT_EXPR:
8563 /* If this is a fixed-point operation, then we cannot use the code
8564 below because "expand_mult" doesn't support sat/no-sat fixed-point
8565 multiplications. */
8566 if (ALL_FIXED_POINT_MODE_P (mode))
8567 goto binop;
8568
8569 /* If first operand is constant, swap them.
8570 Thus the following special case checks need only
8571 check the second operand. */
8572 if (TREE_CODE (treeop0) == INTEGER_CST)
a4f59596 8573 std::swap (treeop0, treeop1);
62be004c 8574
8575 /* Attempt to return something suitable for generating an
8576 indexed address, for machines that support that. */
8577
8578 if (modifier == EXPAND_SUM && mode == ptr_mode
e913b5cd 8579 && tree_fits_shwi_p (treeop1))
62be004c 8580 {
8581 tree exp1 = treeop1;
8582
8583 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8584 EXPAND_SUM);
8585
8586 if (!REG_P (op0))
8587 op0 = force_operand (op0, NULL_RTX);
8588 if (!REG_P (op0))
8589 op0 = copy_to_mode_reg (mode, op0);
8590
8591 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
fcb97e84 8592 gen_int_mode (tree_to_shwi (exp1),
62be004c 8593 TYPE_MODE (TREE_TYPE (exp1)))));
8594 }
8595
8596 if (modifier == EXPAND_STACK_PARM)
8597 target = 0;
8598
8599 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
dff12ad7 8600 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
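
      /* Editorial example: with EXPAND_SUM and mode == ptr_mode, "i * 8"
	 may come back as (mult (reg 102) (const_int 8)) so the caller
	 can fold it into an indexed address instead of emitting the
	 multiply.  The register number is illustrative.  */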
10f307d9 8601
dff12ad7 8602 case TRUNC_DIV_EXPR:
8603 case FLOOR_DIV_EXPR:
8604 case CEIL_DIV_EXPR:
8605 case ROUND_DIV_EXPR:
8606 case EXACT_DIV_EXPR:
8607 /* If this is a fixed-point operation, then we cannot use the code
8608 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8609 divisions. */
8610 if (ALL_FIXED_POINT_MODE_P (mode))
8611 goto binop;
aed164c3 8612
dff12ad7 8613 if (modifier == EXPAND_STACK_PARM)
8614 target = 0;
8615 /* Possible optimization: compute the dividend with EXPAND_SUM
8616 then if the divisor is constant can optimize the case
8617 where some terms of the dividend have coeffs divisible by it. */
8618 expand_operands (treeop0, treeop1,
8619 subtarget, &op0, &op1, EXPAND_NORMAL);
8620 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
aed164c3 8621
dff12ad7 8622 case RDIV_EXPR:
8623 goto binop;
4ee9c684 8624
ebf4f764 8625 case MULT_HIGHPART_EXPR:
8626 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8627 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8628 gcc_assert (temp);
8629 return temp;
8630
dff12ad7 8631 case TRUNC_MOD_EXPR:
8632 case FLOOR_MOD_EXPR:
8633 case CEIL_MOD_EXPR:
8634 case ROUND_MOD_EXPR:
8635 if (modifier == EXPAND_STACK_PARM)
8636 target = 0;
8637 expand_operands (treeop0, treeop1,
8638 subtarget, &op0, &op1, EXPAND_NORMAL);
8639 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
cf389750 8640
dff12ad7 8641 case FIXED_CONVERT_EXPR:
8642 op0 = expand_normal (treeop0);
8643 if (target == 0 || modifier == EXPAND_STACK_PARM)
8644 target = gen_reg_rtx (mode);
cf389750 8645
dff12ad7 8646 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8647 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8648 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8649 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8650 else
8651 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8652 return target;
4ee9c684 8653
dff12ad7 8654 case FIX_TRUNC_EXPR:
8655 op0 = expand_normal (treeop0);
8656 if (target == 0 || modifier == EXPAND_STACK_PARM)
8657 target = gen_reg_rtx (mode);
8658 expand_fix (target, op0, unsignedp);
8659 return target;
10f307d9 8660
dff12ad7 8661 case FLOAT_EXPR:
8662 op0 = expand_normal (treeop0);
8663 if (target == 0 || modifier == EXPAND_STACK_PARM)
8664 target = gen_reg_rtx (mode);
8665 /* expand_float can't figure out what to do if FROM has VOIDmode.
8666 So give it the correct mode. With -O, cse will optimize this. */
8667 if (GET_MODE (op0) == VOIDmode)
8668 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8669 op0);
8670 expand_float (target, op0,
8671 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8672 return target;
2ef1e405 8673
dff12ad7 8674 case NEGATE_EXPR:
8675 op0 = expand_expr (treeop0, subtarget,
8676 VOIDmode, EXPAND_NORMAL);
8677 if (modifier == EXPAND_STACK_PARM)
8678 target = 0;
8679 temp = expand_unop (mode,
8680 optab_for_tree_code (NEGATE_EXPR, type,
8681 optab_default),
8682 op0, target, 0);
8683 gcc_assert (temp);
8684 return REDUCE_BIT_FIELD (temp);
a0c2c45b 8685
dff12ad7 8686 case ABS_EXPR:
8687 op0 = expand_expr (treeop0, subtarget,
8688 VOIDmode, EXPAND_NORMAL);
8689 if (modifier == EXPAND_STACK_PARM)
8690 target = 0;
fa56dc1d 8691
dff12ad7 8692 /* ABS_EXPR is not valid for complex arguments. */
8693 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8694 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
cf389750 8695
dff12ad7 8696 /* Unsigned abs is simply the operand. Testing here means we don't
8697 risk generating incorrect code below. */
8698 if (TYPE_UNSIGNED (type))
8699 return op0;
c75b4594 8700
dff12ad7 8701 return expand_abs (mode, op0, target, unsignedp,
8702 safe_from_p (target, treeop0, 1));
d4cf8ff7 8703
dff12ad7 8704 case MAX_EXPR:
8705 case MIN_EXPR:
8706 target = original_target;
8707 if (target == 0
8708 || modifier == EXPAND_STACK_PARM
8709 || (MEM_P (target) && MEM_VOLATILE_P (target))
8710 || GET_MODE (target) != mode
8711 || (REG_P (target)
8712 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8713 target = gen_reg_rtx (mode);
8714 expand_operands (treeop0, treeop1,
8715 target, &op0, &op1, EXPAND_NORMAL);
d4cf8ff7 8716
dff12ad7 8717 /* First try to do it with a special MIN or MAX instruction.
8718 If that does not win, use a conditional jump to select the proper
8719 value. */
8720 this_optab = optab_for_tree_code (code, type, optab_default);
8721 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8722 OPTAB_WIDEN);
8723 if (temp != 0)
8724 return temp;
1f8b6002 8725
dff12ad7 8726 /* At this point, a MEM target is no longer useful; we will get better
8727 code without it. */
1f8b6002 8728
dff12ad7 8729 if (! REG_P (target))
8730 target = gen_reg_rtx (mode);
1f8b6002 8731
dff12ad7 8732 /* If op1 was placed in target, swap op0 and op1. */
8733 if (target != op0 && target == op1)
1d1a557c 8734 std::swap (op0, op1);
1f8b6002 8735
dff12ad7 8736 /* We generate better code and avoid problems with op1 mentioning
8737 target by forcing op1 into a pseudo if it isn't a constant. */
8738 if (! CONSTANT_P (op1))
8739 op1 = force_reg (mode, op1);
2ef1e405 8740
10f307d9 8741 {
dff12ad7 8742 enum rtx_code comparison_code;
8743 rtx cmpop1 = op1;
952f3892 8744
dff12ad7 8745 if (code == MAX_EXPR)
8746 comparison_code = unsignedp ? GEU : GE;
8747 else
8748 comparison_code = unsignedp ? LEU : LE;
952f3892 8749
dff12ad7 8750 /* Canonicalize to comparisons against 0. */
8751 if (op1 == const1_rtx)
5fb2a157 8752 {
dff12ad7 8753 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8754 or (a != 0 ? a : 1) for unsigned.
8755 For MIN we are safe converting (a <= 1 ? a : 1)
8756 into (a <= 0 ? a : 1) */
8757 cmpop1 = const0_rtx;
8758 if (code == MAX_EXPR)
8759 comparison_code = unsignedp ? NE : GT;
5fb2a157 8760 }
dff12ad7 8761 if (op1 == constm1_rtx && !unsignedp)
a7bab26c 8762 {
dff12ad7 8763 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8764 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8765 cmpop1 = const0_rtx;
8766 if (code == MIN_EXPR)
8767 comparison_code = LT;
a7bab26c 8768 }
9630036a 8769
dff12ad7 8770 /* Use a conditional move if possible. */
8771 if (can_conditionally_move_p (mode))
954bdcb1 8772 {
dff12ad7 8773 rtx insn;
954bdcb1 8774
dff12ad7 8775 start_sequence ();
5785f96f 8776
dff12ad7 8777 /* Try to emit the conditional move. */
8778 insn = emit_conditional_move (target, comparison_code,
8779 op0, cmpop1, mode,
8780 op0, op1, mode,
8781 unsignedp);
8782
8783 /* If we could do the conditional move, emit the sequence,
8784 and return. */
8785 if (insn)
25d55d72 8786 {
1d277a67 8787 rtx_insn *seq = get_insns ();
dff12ad7 8788 end_sequence ();
8789 emit_insn (seq);
8790 return target;
25d55d72 8791 }
8792
dff12ad7 8793 /* Otherwise discard the sequence and fall back to code with
8794 branches. */
8795 end_sequence ();
954bdcb1 8796 }
9630036a 8797
dff12ad7 8798 if (target != op0)
8799 emit_move_insn (target, op0);
954bdcb1 8800
f9a00e9e 8801 lab = gen_label_rtx ();
dff12ad7 8802 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
f9a00e9e 8803 unsignedp, mode, NULL_RTX, NULL, lab,
79ab74cc 8804 -1);
dff12ad7 8805 }
8806 emit_move_insn (target, op1);
f9a00e9e 8807 emit_label (lab);
dff12ad7 8808 return target;
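
      /* Worked example (editorial) of the canonicalization above: for
	 signed MAX (a, 1), op1 == const1_rtx, so the comparison becomes
	 (a > 0 ? a : 1); for unsigned it becomes (a != 0 ? a : 1).  Both
	 replace a compare against 1 with a cheaper compare against zero
	 without changing the result.  */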
67c68e45 8809
dff12ad7 8810 case BIT_NOT_EXPR:
8811 op0 = expand_expr (treeop0, subtarget,
8812 VOIDmode, EXPAND_NORMAL);
8813 if (modifier == EXPAND_STACK_PARM)
8814 target = 0;
f1c44a44 8815      /* If we have to reduce the result to bit-field precision for an
7e5608ca 8816	 unsigned bit-field, expand this as an XOR with a proper constant
 8817	 instead.  */
8818 if (reduce_bit_field && TYPE_UNSIGNED (type))
e913b5cd 8819 {
796b6678 8820 wide_int mask = wi::mask (TYPE_PRECISION (type),
8821 false, GET_MODE_PRECISION (mode));
e913b5cd 8822
8823 temp = expand_binop (mode, xor_optab, op0,
8824 immed_wide_int_const (mask, mode),
8825 target, 1, OPTAB_LIB_WIDEN);
8826 }
f1c44a44 8827 else
8828 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
dff12ad7 8829 gcc_assert (temp);
8830 return temp;
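
      /* Worked example (editorial): for an unsigned 3-bit bit-field
	 type held in QImode, a plain one's complement would set the five
	 bits above the precision, so the code above computes op0 ^ 7
	 instead, which is already reduced to 3 bits.  */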
c3a9c149 8831
dff12ad7 8832 /* ??? Can optimize bitwise operations with one arg constant.
8833 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8834 and (a bitwise1 b) bitwise2 b (etc)
8835 but that is probably not worth while. */
10f307d9 8836
dff12ad7 8837 case BIT_AND_EXPR:
dff12ad7 8838 case BIT_IOR_EXPR:
dff12ad7 8839 case BIT_XOR_EXPR:
8840 goto binop;
0e9fefce 8841
dff12ad7 8842 case LROTATE_EXPR:
8843 case RROTATE_EXPR:
8844 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8845 || (GET_MODE_PRECISION (TYPE_MODE (type))
8846 == TYPE_PRECISION (type)));
8847 /* fall through */
0e20f9fb 8848
dff12ad7 8849 case LSHIFT_EXPR:
8850 case RSHIFT_EXPR:
8851 /* If this is a fixed-point operation, then we cannot use the code
8852 below because "expand_shift" doesn't support sat/no-sat fixed-point
8853 shifts. */
8854 if (ALL_FIXED_POINT_MODE_P (mode))
8855 goto binop;
fa56dc1d 8856
dff12ad7 8857 if (! safe_from_p (subtarget, treeop1, 1))
8858 subtarget = 0;
8859 if (modifier == EXPAND_STACK_PARM)
8860 target = 0;
8861 op0 = expand_expr (treeop0, subtarget,
8862 VOIDmode, EXPAND_NORMAL);
f5ff0b21 8863 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8864 unsignedp);
dff12ad7 8865 if (code == LSHIFT_EXPR)
8866 temp = REDUCE_BIT_FIELD (temp);
8867 return temp;
10f307d9 8868
dff12ad7 8869 /* Could determine the answer when only additive constants differ. Also,
8870 the addition of one can be handled by changing the condition. */
8871 case LT_EXPR:
8872 case LE_EXPR:
8873 case GT_EXPR:
8874 case GE_EXPR:
8875 case EQ_EXPR:
8876 case NE_EXPR:
8877 case UNORDERED_EXPR:
8878 case ORDERED_EXPR:
8879 case UNLT_EXPR:
8880 case UNLE_EXPR:
8881 case UNGT_EXPR:
8882 case UNGE_EXPR:
8883 case UNEQ_EXPR:
8884 case LTGT_EXPR:
f9a00e9e 8885 {
8886 temp = do_store_flag (ops,
8887 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8888 tmode != VOIDmode ? tmode : mode);
8889 if (temp)
8890 return temp;
8891
8892 /* Use a compare and a jump for BLKmode comparisons, or for function
a84cae90 8893	 type comparisons if have_canonicalize_funcptr_for_compare.  */
f9a00e9e 8894
8895 if ((target == 0
8896 || modifier == EXPAND_STACK_PARM
8897 || ! safe_from_p (target, treeop0, 1)
8898 || ! safe_from_p (target, treeop1, 1)
8899 /* Make sure we don't have a hard reg (such as function's return
8900 value) live across basic blocks, if not optimizing. */
8901 || (!optimize && REG_P (target)
8902 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8903 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8904
8905 emit_move_insn (target, const0_rtx);
8906
8907 rtx_code_label *lab1 = gen_label_rtx ();
8908 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8909
8910 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8911 emit_move_insn (target, constm1_rtx);
8912 else
8913 emit_move_insn (target, const1_rtx);
10f307d9 8914
f9a00e9e 8915 emit_label (lab1);
8916 return target;
8917 }
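
    /* Editorial sketch of the fallback above for "t = (a == b)" when
       do_store_flag cannot emit a store-flag insn:

	 t = 0;
	 if (!(a == b)) goto lab1;
	 t = 1;    (constm1_rtx instead for a signed 1-bit type)
       lab1:
    */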
dff12ad7 8918 case COMPLEX_EXPR:
8919 /* Get the rtx code of the operands. */
8920 op0 = expand_normal (treeop0);
8921 op1 = expand_normal (treeop1);
f8ca8b77 8922
dff12ad7 8923 if (!target)
8924 target = gen_reg_rtx (TYPE_MODE (type));
83014b20 8925 else
8926 /* If target overlaps with op1, then either we need to force
8927 op1 into a pseudo (if target also overlaps with op0),
8928 or write the complex parts in reverse order. */
8929 switch (GET_CODE (target))
8930 {
8931 case CONCAT:
8932 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8933 {
8934 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8935 {
8936 complex_expr_force_op1:
8937 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8938 emit_move_insn (temp, op1);
8939 op1 = temp;
8940 break;
8941 }
8942 complex_expr_swap_order:
8943 /* Move the imaginary (op1) and real (op0) parts to their
8944 location. */
8945 write_complex_part (target, op1, true);
8946 write_complex_part (target, op0, false);
8947
8948 return target;
8949 }
8950 break;
8951 case MEM:
8952 temp = adjust_address_nv (target,
8953 GET_MODE_INNER (GET_MODE (target)), 0);
8954 if (reg_overlap_mentioned_p (temp, op1))
8955 {
3754d046 8956 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
83014b20 8957 temp = adjust_address_nv (target, imode,
8958 GET_MODE_SIZE (imode));
8959 if (reg_overlap_mentioned_p (temp, op0))
8960 goto complex_expr_force_op1;
8961 goto complex_expr_swap_order;
8962 }
8963 break;
8964 default:
8965 if (reg_overlap_mentioned_p (target, op1))
8966 {
8967 if (reg_overlap_mentioned_p (target, op0))
8968 goto complex_expr_force_op1;
8969 goto complex_expr_swap_order;
8970 }
8971 break;
8972 }
b5ba9f3a 8973
dff12ad7 8974 /* Move the real (op0) and imaginary (op1) parts to their location. */
8975 write_complex_part (target, op0, false);
8976 write_complex_part (target, op1, true);
c3a9c149 8977
dff12ad7 8978 return target;
6e6b4174 8979
dff12ad7 8980 case WIDEN_SUM_EXPR:
8981 {
8982 tree oprnd0 = treeop0;
8983 tree oprnd1 = treeop1;
1c9f9aa6 8984
dff12ad7 8985 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8986 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8987 target, unsignedp);
8988 return target;
10f307d9 8989 }
8990
dff12ad7 8991 case REDUC_MAX_EXPR:
8992 case REDUC_MIN_EXPR:
8993 case REDUC_PLUS_EXPR:
10fc867f 8994 {
dff12ad7 8995 op0 = expand_normal (treeop0);
8996 this_optab = optab_for_tree_code (code, type, optab_default);
3754d046 8997 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
f3d76545 8998
8999 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9000 {
9001 struct expand_operand ops[2];
9002 enum insn_code icode = optab_handler (this_optab, vec_mode);
9003
9004 create_output_operand (&ops[0], target, mode);
9005 create_input_operand (&ops[1], op0, vec_mode);
9006 if (maybe_expand_insn (icode, 2, ops))
9007 {
9008 target = ops[0].value;
9009 if (GET_MODE (target) != mode)
9010 return gen_lowpart (tmode, target);
9011 return target;
9012 }
9013 }
9014 /* Fall back to optab with vector result, and then extract scalar. */
9015 this_optab = scalar_reduc_to_vector (this_optab, type);
7ba68b18 9016 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9017 gcc_assert (temp);
9018 /* The tree code produces a scalar result, but (somewhat by convention)
9019 the optab produces a vector with the result in element 0 if
9020 little-endian, or element N-1 if big-endian. So pull the scalar
9021 result out of that element. */
9022 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9023 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9024 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9025 target, mode, mode);
dff12ad7 9026 gcc_assert (temp);
9027 return temp;
9028 }
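
      /* Editorial example: for REDUC_PLUS_EXPR on a V4SImode operand, a
	 target may only provide the vector-result optab, which leaves
	 the sum in element 0 (element 3 on a big-endian target); the
	 extract_bit_field call above then pulls that 32-bit lane out as
	 the scalar result.  */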
10fc867f 9029
dff12ad7 9030 case VEC_UNPACK_HI_EXPR:
9031 case VEC_UNPACK_LO_EXPR:
9032 {
9033 op0 = expand_normal (treeop0);
dff12ad7 9034 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9035 target, unsignedp);
9036 gcc_assert (temp);
9037 return temp;
9038 }
a9d9ab08 9039
dff12ad7 9040 case VEC_UNPACK_FLOAT_HI_EXPR:
9041 case VEC_UNPACK_FLOAT_LO_EXPR:
9042 {
9043 op0 = expand_normal (treeop0);
9044 /* The signedness is determined from input operand. */
dff12ad7 9045 temp = expand_widen_pattern_expr
9046 (ops, op0, NULL_RTX, NULL_RTX,
9047 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
155b05dc 9048
dff12ad7 9049 gcc_assert (temp);
9050 return temp;
9051 }
d2ae1b1e 9052
dff12ad7 9053 case VEC_WIDEN_MULT_HI_EXPR:
9054 case VEC_WIDEN_MULT_LO_EXPR:
79a78f7f 9055 case VEC_WIDEN_MULT_EVEN_EXPR:
9056 case VEC_WIDEN_MULT_ODD_EXPR:
6083c152 9057 case VEC_WIDEN_LSHIFT_HI_EXPR:
9058 case VEC_WIDEN_LSHIFT_LO_EXPR:
79a78f7f 9059 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9060 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9061 target, unsignedp);
9062 gcc_assert (target);
9063 return target;
6083c152 9064
dff12ad7 9065 case VEC_PACK_TRUNC_EXPR:
9066 case VEC_PACK_SAT_EXPR:
9067 case VEC_PACK_FIX_TRUNC_EXPR:
9068 mode = TYPE_MODE (TREE_TYPE (treeop0));
9069 goto binop;
f4803722 9070
9071 case VEC_PERM_EXPR:
e21c468f 9072 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9073 op2 = expand_normal (treeop2);
d9198d85 9074
9075 /* Careful here: if the target doesn't support integral vector modes,
9076 a constant selection vector could wind up smooshed into a normal
9077 integral constant. */
9078 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9079 {
9080 tree sel_type = TREE_TYPE (treeop2);
3754d046 9081 machine_mode vmode
d9198d85 9082 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9083 TYPE_VECTOR_SUBPARTS (sel_type));
9084 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9085 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9086 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9087 }
9088 else
9089 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9090
e21c468f 9091 temp = expand_vec_perm (mode, op0, op1, op2, target);
9092 gcc_assert (temp);
9093 return temp;
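
    /* Editorial example of the CONST_VECTOR fix-up above: a constant
       selector such as { 0, 2, 4, 6 } can reach here folded into one
       integer constant when the target has no integral vector modes;
       simplify_subreg rebuilds the CONST_VECTOR that expand_vec_perm
       requires.  */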
10f307d9 9094
c86930b0 9095 case DOT_PROD_EXPR:
9096 {
9097 tree oprnd0 = treeop0;
9098 tree oprnd1 = treeop1;
9099 tree oprnd2 = treeop2;
9100 rtx op2;
9101
9102 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9103 op2 = expand_normal (oprnd2);
9104 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9105 target, unsignedp);
9106 return target;
9107 }
9108
a2287001 9109 case SAD_EXPR:
9110 {
9111 tree oprnd0 = treeop0;
9112 tree oprnd1 = treeop1;
9113 tree oprnd2 = treeop2;
9114 rtx op2;
9115
9116 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9117 op2 = expand_normal (oprnd2);
9118 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9119 target, unsignedp);
9120 return target;
9121 }
9122
c86930b0 9123 case REALIGN_LOAD_EXPR:
9124 {
9125 tree oprnd0 = treeop0;
9126 tree oprnd1 = treeop1;
9127 tree oprnd2 = treeop2;
9128 rtx op2;
9129
9130 this_optab = optab_for_tree_code (code, type, optab_default);
9131 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9132 op2 = expand_normal (oprnd2);
9133 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9134 target, unsignedp);
9135 gcc_assert (temp);
9136 return temp;
9137 }
9138
8a2caf10 9139 case COND_EXPR:
f9a00e9e 9140 {
9141 /* A COND_EXPR with its type being VOID_TYPE represents a
9142 conditional jump and is handled in
9143 expand_gimple_cond_expr. */
9144 gcc_assert (!VOID_TYPE_P (type));
9145
9146 /* Note that COND_EXPRs whose type is a structure or union
9147 are required to be constructed to contain assignments of
9148 a temporary variable, so that we can evaluate them here
9149 for side effect only. If type is void, we must do likewise. */
9150
9151 gcc_assert (!TREE_ADDRESSABLE (type)
9152 && !ignore
9153 && TREE_TYPE (treeop1) != void_type_node
9154 && TREE_TYPE (treeop2) != void_type_node);
9155
9156 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9157 if (temp)
9158 return temp;
9159
9160 /* If we are not to produce a result, we have no target. Otherwise,
9161 if a target was specified use it; it will not be used as an
9162 intermediate target unless it is safe. If no target, use a
9163 temporary. */
9164
9165 if (modifier != EXPAND_STACK_PARM
9166 && original_target
9167 && safe_from_p (original_target, treeop0, 1)
9168 && GET_MODE (original_target) == mode
9169 && !MEM_P (original_target))
9170 temp = original_target;
9171 else
9172 temp = assign_temp (type, 0, 1);
9173
9174 do_pending_stack_adjust ();
9175 NO_DEFER_POP;
9176 rtx_code_label *lab0 = gen_label_rtx ();
9177 rtx_code_label *lab1 = gen_label_rtx ();
9178 jumpifnot (treeop0, lab0, -1);
9179 store_expr (treeop1, temp,
9180 modifier == EXPAND_STACK_PARM,
9181 false);
9182
1d5ad681 9183 emit_jump_insn (targetm.gen_jump (lab1));
f9a00e9e 9184 emit_barrier ();
9185 emit_label (lab0);
9186 store_expr (treeop2, temp,
9187 modifier == EXPAND_STACK_PARM,
9188 false);
9189
9190 emit_label (lab1);
9191 OK_DEFER_POP;
c909ed33 9192 return temp;
f9a00e9e 9193 }
8a2caf10 9194
9195 case VEC_COND_EXPR:
9196 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9197 return target;
9198
dff12ad7 9199 default:
9200 gcc_unreachable ();
9201 }
d2ae1b1e 9202
dff12ad7 9203 /* Here to do an ordinary binary operator. */
9204 binop:
9205 expand_operands (treeop0, treeop1,
9206 subtarget, &op0, &op1, EXPAND_NORMAL);
9207 binop2:
9208 this_optab = optab_for_tree_code (code, type, optab_default);
9209 binop3:
9210 if (modifier == EXPAND_STACK_PARM)
9211 target = 0;
9212 temp = expand_binop (mode, this_optab, op0, op1, target,
1701f789 9213 unsignedp, OPTAB_LIB_WIDEN);
dff12ad7 9214 gcc_assert (temp);
8618a9e7 9215 /* Bitwise operations do not need bitfield reduction as we expect their
9216 operands being properly truncated. */
9217 if (code == BIT_XOR_EXPR
9218 || code == BIT_AND_EXPR
9219 || code == BIT_IOR_EXPR)
9220 return temp;
dff12ad7 9221 return REDUCE_BIT_FIELD (temp);
9222}
9223#undef REDUCE_BIT_FIELD
d324678b 9224
f7373a91 9225
9226/* Return TRUE if expression STMT is suitable for replacement.
 9227   Never consider memory loads as replaceable, because those never lead
 9228   to constant expressions.  */
9229
9230static bool
9231stmt_is_replaceable_p (gimple stmt)
9232{
9233 if (ssa_is_replaceable_p (stmt))
9234 {
9235 /* Don't move around loads. */
9236 if (!gimple_assign_single_p (stmt)
9237 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9238 return true;
9239 }
9240 return false;
9241}
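
/* Editorial example: for GIMPLE such as

     tmp_1 = *p_2;
     ...
     use (tmp_1);

   the load is rejected above even when ssa_is_replaceable_p allows it:
   its RHS is a memory reference, not a gimple value, and as the comment
   says, forwarding loads never produces constant expressions.  */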
9242
16c9337c 9243rtx
3754d046 9244expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
a12f023f 9245 enum expand_modifier modifier, rtx *alt_rtl,
9246 bool inner_reference_p)
dff12ad7 9247{
9248 rtx op0, op1, temp, decl_rtl;
9249 tree type;
9250 int unsignedp;
1382992b 9251 machine_mode mode;
dff12ad7 9252 enum tree_code code = TREE_CODE (exp);
dff12ad7 9253 rtx subtarget, original_target;
9254 int ignore;
9255 tree context;
9256 bool reduce_bit_field;
9257 location_t loc = EXPR_LOCATION (exp);
9258 struct separate_ops ops;
9259 tree treeop0, treeop1, treeop2;
ae929441 9260 tree ssa_name = NULL_TREE;
9261 gimple g;
d324678b 9262
dff12ad7 9263 type = TREE_TYPE (exp);
9264 mode = TYPE_MODE (type);
9265 unsignedp = TYPE_UNSIGNED (type);
d324678b 9266
dff12ad7 9267 treeop0 = treeop1 = treeop2 = NULL_TREE;
9268 if (!VL_EXP_CLASS_P (exp))
9269 switch (TREE_CODE_LENGTH (code))
9270 {
9271 default:
9272 case 3: treeop2 = TREE_OPERAND (exp, 2);
9273 case 2: treeop1 = TREE_OPERAND (exp, 1);
9274 case 1: treeop0 = TREE_OPERAND (exp, 0);
9275 case 0: break;
9276 }
9277 ops.code = code;
9278 ops.type = type;
9279 ops.op0 = treeop0;
9280 ops.op1 = treeop1;
9281 ops.op2 = treeop2;
9282 ops.location = loc;
dda75192 9283
dff12ad7 9284 ignore = (target == const0_rtx
9285 || ((CONVERT_EXPR_CODE_P (code)
9286 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9287 && TREE_CODE (type) == VOID_TYPE));
5b1bb114 9288
dff12ad7 9289 /* An operation in what may be a bit-field type needs the
9290 result to be reduced to the precision of the bit-field type,
9291 which is narrower than that of the type's mode. */
9292 reduce_bit_field = (!ignore
c3c9a9f3 9293 && INTEGRAL_TYPE_P (type)
dff12ad7 9294 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
dda75192 9295
dff12ad7 9296 /* If we are going to ignore this result, we need only do something
9297 if there is a side-effect somewhere in the expression. If there
9298 is, short-circuit the most common cases here. Note that we must
9299 not call expand_expr with anything but const0_rtx in case this
9300 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
d2ae1b1e 9301
dff12ad7 9302 if (ignore)
9303 {
9304 if (! TREE_SIDE_EFFECTS (exp))
9305 return const0_rtx;
9306
9307 /* Ensure we reference a volatile object even if value is ignored, but
9308 don't do this if all we are doing is taking its address. */
9309 if (TREE_THIS_VOLATILE (exp)
9310 && TREE_CODE (exp) != FUNCTION_DECL
9311 && mode != VOIDmode && mode != BLKmode
9312 && modifier != EXPAND_CONST_ADDRESS)
78aee3e4 9313 {
dff12ad7 9314 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9315 if (MEM_P (temp))
1084097d 9316 copy_to_reg (temp);
dff12ad7 9317 return const0_rtx;
78aee3e4 9318 }
9319
dff12ad7 9320 if (TREE_CODE_CLASS (code) == tcc_unary
2330f9c5 9321 || code == BIT_FIELD_REF
9322 || code == COMPONENT_REF
9323 || code == INDIRECT_REF)
dff12ad7 9324 return expand_expr (treeop0, const0_rtx, VOIDmode,
9325 modifier);
f9b618cc 9326
dff12ad7 9327 else if (TREE_CODE_CLASS (code) == tcc_binary
9328 || TREE_CODE_CLASS (code) == tcc_comparison
9329 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9330 {
9331 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9332 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9333 return const0_rtx;
9334 }
f9b618cc 9335
dff12ad7 9336 target = 0;
9337 }
f9b618cc 9338
dff12ad7 9339 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9340 target = 0;
f9b618cc 9341
dff12ad7 9342 /* Use subtarget as the target for operand 0 of a binary operation. */
9343 subtarget = get_subtarget (target);
9344 original_target = target;
f9b618cc 9345
dff12ad7 9346 switch (code)
9347 {
9348 case LABEL_DECL:
9349 {
9350 tree function = decl_function_context (exp);
f9b618cc 9351
dff12ad7 9352 temp = label_rtx (exp);
9353 temp = gen_rtx_LABEL_REF (Pmode, temp);
f9b618cc 9354
dff12ad7 9355 if (function != current_function_decl
9356 && function != 0)
9357 LABEL_REF_NONLOCAL_P (temp) = 1;
9358
9359 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9360 return temp;
f9b618cc 9361 }
9362
dff12ad7 9363 case SSA_NAME:
9364 /* ??? ivopts calls expander, without any preparation from
9365 out-of-ssa. So fake instructions as if this was an access to the
 9366	 base variable.  This unnecessarily allocates a pseudo; see how we
 9367	 can reuse it if partition base vars have it set already.  */
9368 if (!currently_expanding_to_rtl)
ec11736b 9369 {
9370 tree var = SSA_NAME_VAR (exp);
9371 if (var && DECL_RTL_SET_P (var))
9372 return DECL_RTL (var);
9373 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9374 LAST_VIRTUAL_REGISTER + 1);
9375 }
ae929441 9376
9377 g = get_gimple_for_ssa_name (exp);
351b6ccf 9378 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9379 if (g == NULL
9380 && modifier == EXPAND_INITIALIZER
9381 && !SSA_NAME_IS_DEFAULT_DEF (exp)
1382992b 9382 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
351b6ccf 9383 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9384 g = SSA_NAME_DEF_STMT (exp);
ae929441 9385 if (g)
0a98b6d9 9386 {
ed4d69dc 9387 rtx r;
903906b6 9388 ops.code = gimple_assign_rhs_code (g);
9389 switch (get_gimple_rhs_class (ops.code))
9390 {
9391 case GIMPLE_TERNARY_RHS:
9392 ops.op2 = gimple_assign_rhs3 (g);
9393 /* Fallthru */
9394 case GIMPLE_BINARY_RHS:
9395 ops.op1 = gimple_assign_rhs2 (g);
9336ad57 9396
 9397	      /* Try to expand a conditional compare.  */
9398 if (targetm.gen_ccmp_first)
9399 {
9400 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9401 r = expand_ccmp_expr (g);
9402 if (r)
9403 break;
9404 }
903906b6 9405 /* Fallthru */
9406 case GIMPLE_UNARY_RHS:
9407 ops.op0 = gimple_assign_rhs1 (g);
9408 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9409 ops.location = gimple_location (g);
9410 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9411 break;
9412 case GIMPLE_SINGLE_RHS:
9413 {
9414 location_t saved_loc = curr_insn_location ();
9415 set_curr_insn_location (gimple_location (g));
9416 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9417 tmode, modifier, NULL, inner_reference_p);
9418 set_curr_insn_location (saved_loc);
9419 break;
9420 }
9421 default:
9422 gcc_unreachable ();
9423 }
0a98b6d9 9424 if (REG_P (r) && !REG_EXPR (r))
9425 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9426 return r;
9427 }
ae929441 9428
9429 ssa_name = exp;
9430 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9431 exp = SSA_NAME_VAR (ssa_name);
dff12ad7 9432 goto expand_decl_rtl;
f96c43fb 9433
dff12ad7 9434 case PARM_DECL:
9435 case VAR_DECL:
9436 /* If a static var's type was incomplete when the decl was written,
9437 but the type is complete now, lay out the decl now. */
9438 if (DECL_SIZE (exp) == 0
9439 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9440 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9441 layout_decl (exp, 0);
9442
dff12ad7 9443 /* ... fall through ... */
f96c43fb 9444
dff12ad7 9445 case FUNCTION_DECL:
9446 case RESULT_DECL:
9447 decl_rtl = DECL_RTL (exp);
9448 expand_decl_rtl:
9449 gcc_assert (decl_rtl);
9450 decl_rtl = copy_rtx (decl_rtl);
47a55be7 9451 /* Record writes to register variables. */
d82cf2b2 9452 if (modifier == EXPAND_WRITE
9453 && REG_P (decl_rtl)
9454 && HARD_REGISTER_P (decl_rtl))
9455 add_to_hard_reg_set (&crtl->asm_clobbers,
9456 GET_MODE (decl_rtl), REGNO (decl_rtl));
f96c43fb 9457
dff12ad7 9458 /* Ensure variable marked as used even if it doesn't go through
 9459	 a parser.  If it hasn't been used yet, write out an external
9460 definition. */
1382992b 9461 TREE_USED (exp) = 1;
f96c43fb 9462
dff12ad7 9463 /* Show we haven't gotten RTL for this yet. */
9464 temp = 0;
f96c43fb 9465
dff12ad7 9466 /* Variables inherited from containing functions should have
9467 been lowered by this point. */
1382992b 9468 context = decl_function_context (exp);
9469 gcc_assert (SCOPE_FILE_SCOPE_P (context)
dff12ad7 9470 || context == current_function_decl
9471 || TREE_STATIC (exp)
ca601f81 9472 || DECL_EXTERNAL (exp)
dff12ad7 9473 /* ??? C++ creates functions that are not TREE_STATIC. */
9474 || TREE_CODE (exp) == FUNCTION_DECL);
f96c43fb 9475
dff12ad7 9476 /* This is the case of an array whose size is to be determined
9477 from its initializer, while the initializer is still being parsed.
0ab48139 9478 ??? We aren't parsing while expanding anymore. */
e58d0f17 9479
dff12ad7 9480 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9481 temp = validize_mem (decl_rtl);
f96c43fb 9482
dff12ad7 9483 /* If DECL_RTL is memory, we are in the normal case and the
9484 address is not valid, get the address into a register. */
ff385626 9485
dff12ad7 9486 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9487 {
9488 if (alt_rtl)
9489 *alt_rtl = decl_rtl;
9490 decl_rtl = use_anchored_address (decl_rtl);
9491 if (modifier != EXPAND_CONST_ADDRESS
9492 && modifier != EXPAND_SUM
1382992b 9493 && !memory_address_addr_space_p (DECL_MODE (exp),
bd1a81f7 9494 XEXP (decl_rtl, 0),
9495 MEM_ADDR_SPACE (decl_rtl)))
dff12ad7 9496 temp = replace_equiv_address (decl_rtl,
9497 copy_rtx (XEXP (decl_rtl, 0)));
f96c43fb 9498 }
9499
dff12ad7 9500 /* If we got something, return it. But first, set the alignment
9501 if the address is a register. */
9502 if (temp != 0)
9503 {
1382992b 9504 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
dff12ad7 9505 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
f96c43fb 9506
dff12ad7 9507 return temp;
9508 }
0de36bdb 9509
ee5ab2d1 9510 /* If the mode of DECL_RTL does not match that of the decl,
9511 there are two cases: we are dealing with a BLKmode value
9512 that is returned in a register, or we are dealing with
9513 a promoted value. In the latter case, return a SUBREG
9514 of the wanted mode, but mark it so that we know that it
9515 was already extended. */
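	 /* For example, on a target whose ABI promotes sub-word
	    arguments to word mode, a PARM_DECL of type char has
	    DECL_MODE QImode while its DECL_RTL is an SImode register;
	    the code below then returns a QImode lowpart SUBREG of that
	    register with SUBREG_PROMOTED_VAR_P set, so consumers know
	    the value is already extended.  (Illustrative; the promotion
	    rules come from promote_function_mode/promote_decl_mode.)  */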
9516 if (REG_P (decl_rtl)
1382992b 9517 && DECL_MODE (exp) != BLKmode
9518 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7e564f73 9519 {
3754d046 9520 machine_mode pmode;
7e564f73 9521
ae929441 9522 /* Get the signedness to be used for this variable. Ensure we get
9523 the same mode we got when the variable was declared. */
1382992b 9524 if (code == SSA_NAME
9525 && (g = SSA_NAME_DEF_STMT (ssa_name))
9526 && gimple_code (g) == GIMPLE_CALL
9527 && !gimple_call_internal_p (g))
4acb747c 9528 pmode = promote_function_mode (type, mode, &unsignedp,
9529 gimple_call_fntype (g),
9530 2);
ae929441 9531 else
1382992b 9532 pmode = promote_decl_mode (exp, &unsignedp);
dff12ad7 9533 gcc_assert (GET_MODE (decl_rtl) == pmode);
9534
9535 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9536 SUBREG_PROMOTED_VAR_P (temp) = 1;
5a9ccd1b 9537 SUBREG_PROMOTED_SET (temp, unsignedp);
dff12ad7 9538 return temp;
7e564f73 9539 }
9540
dff12ad7 9541 return decl_rtl;
10f307d9 9542
dff12ad7 9543 case INTEGER_CST:
90079d10 9544 /* Given that TYPE_PRECISION (type) is not always equal to
c4050ce7 9545 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9546 the former to the latter according to the signedness of the
90079d10 9547 type. */
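      /* For example, for a signed type with TYPE_PRECISION 1 whose
	 mode is QImode, the constant with its single bit set denotes
	 -1; wide_int::from sign-extends it to the full eight bits, so
	 the result is (const_int -1) rather than (const_int 1).  */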
c4050ce7 9548 temp = immed_wide_int_const (wide_int::from
9549 (exp,
9550 GET_MODE_PRECISION (TYPE_MODE (type)),
9551 TYPE_SIGN (type)),
9552 TYPE_MODE (type));
9553 return temp;
9554
dff12ad7 9555 case VECTOR_CST:
9556 {
9557 tree tmp = NULL_TREE;
9558 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9559 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9560 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9561 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9562 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9563 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9564 return const_vector_from_tree (exp);
9565 if (GET_MODE_CLASS (mode) == MODE_INT)
9566 {
9567 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9568 if (type_for_mode)
9569 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9570 }
9571 if (!tmp)
fadf62f4 9572 {
f1f41a6c 9573 vec<constructor_elt, va_gc> *v;
fadf62f4 9574 unsigned i;
f1f41a6c 9575 vec_alloc (v, VECTOR_CST_NELTS (exp));
fadf62f4 9576 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9577 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9578 tmp = build_constructor (type, v);
9579 }
dff12ad7 9580 return expand_expr (tmp, ignore ? const0_rtx : target,
9581 tmode, modifier);
9582 }
10f307d9 9583
dff12ad7 9584 case CONST_DECL:
9585 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10f307d9 9586
dff12ad7 9587 case REAL_CST:
9588 /* If optimized, generate immediate CONST_DOUBLE
9589 which will be turned into memory by reload if necessary.
2c551bbe 9590
dff12ad7 9591 We used to force a register so that loop.c could see it. But
9592 this does not allow gen_* patterns to perform optimizations with
9593 the constants. It also produces two insns in cases like "x = 1.0;".
9594 On most machines, floating-point constants are not permitted in
9595 many insns, so we'd end up copying it to a register in any case.
10f307d9 9596
dff12ad7 9597 Now, we do the copying in expand_binop, if appropriate. */
9598 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9599 TYPE_MODE (TREE_TYPE (exp)));
2c551bbe 9600
dff12ad7 9601 case FIXED_CST:
9602 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9603 TYPE_MODE (TREE_TYPE (exp)));
10f307d9 9604
dff12ad7 9605 case COMPLEX_CST:
9606 /* Handle evaluating a complex constant in a CONCAT target. */
9607 if (original_target && GET_CODE (original_target) == CONCAT)
a60e4107 9608 {
3754d046 9609 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
dff12ad7 9610 rtx rtarg, itarg;
9611
9612 rtarg = XEXP (original_target, 0);
9613 itarg = XEXP (original_target, 1);
9614
9615 /* Move the real and imaginary parts separately. */
9616 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9617 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9618
9619 if (op0 != rtarg)
9620 emit_move_insn (rtarg, op0);
9621 if (op1 != itarg)
9622 emit_move_insn (itarg, op1);
9623
9624 return original_target;
a60e4107 9625 }
10f307d9 9626
dff12ad7 9627 /* ... fall through ... */
10f307d9 9628
dff12ad7 9629 case STRING_CST:
9630 temp = expand_expr_constant (exp, 1, modifier);
43fda261 9631
dff12ad7 9632 /* temp contains a constant address.
9633 On RISC machines where a constant address isn't valid,
9634 make some insns to get that address into a register. */
9635 if (modifier != EXPAND_CONST_ADDRESS
9636 && modifier != EXPAND_INITIALIZER
9637 && modifier != EXPAND_SUM
bd1a81f7 9638 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9639 MEM_ADDR_SPACE (temp)))
dff12ad7 9640 return replace_equiv_address (temp,
9641 copy_rtx (XEXP (temp, 0)));
9642 return temp;
43fda261 9643
dff12ad7 9644 case SAVE_EXPR:
9645 {
9646 tree val = treeop0;
a12f023f 9647 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9648 inner_reference_p);
94d01330 9649
dff12ad7 9650 if (!SAVE_EXPR_RESOLVED_P (exp))
9651 {
9652 /* We can indeed still hit this case, typically via builtin
9653 expanders calling save_expr immediately before expanding
9654 something. Assume this means that we only have to deal
9655 with non-BLKmode values. */
9656 gcc_assert (GET_MODE (ret) != BLKmode);
a02b3586 9657
ed4d69dc 9658 val = build_decl (curr_insn_location (),
dff12ad7 9659 VAR_DECL, NULL, TREE_TYPE (exp));
9660 DECL_ARTIFICIAL (val) = 1;
9661 DECL_IGNORED_P (val) = 1;
9662 treeop0 = val;
9663 TREE_OPERAND (exp, 0) = treeop0;
9664 SAVE_EXPR_RESOLVED_P (exp) = 1;
acd367d1 9665
dff12ad7 9666 if (!CONSTANT_P (ret))
9667 ret = copy_to_reg (ret);
9668 SET_DECL_RTL (val, ret);
9669 }
acd367d1 9670
dff12ad7 9671 return ret;
9672 }
9673
dff12ad7 9674
9675 case CONSTRUCTOR:
9676 /* If we don't need the result, just ensure we evaluate any
9677 subexpressions. */
9678 if (ignore)
acd367d1 9679 {
dff12ad7 9680 unsigned HOST_WIDE_INT idx;
9681 tree value;
9682
9683 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9684 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9685
9686 return const0_rtx;
acd367d1 9687 }
9688
dff12ad7 9689 return expand_constructor (exp, target, modifier, false);
10f307d9 9690
5d9de213 9691 case TARGET_MEM_REF:
dff12ad7 9692 {
1211c450 9693 addr_space_t as
9694 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
36eabf84 9695 enum insn_code icode;
56cf6489 9696 unsigned int align;
bd1a81f7 9697
64641360 9698 op0 = addr_for_mem_ref (exp, as, true);
bd1a81f7 9699 op0 = memory_address_addr_space (mode, op0, as);
dff12ad7 9700 temp = gen_rtx_MEM (mode, op0);
dff12ad7 9701 set_mem_attributes (temp, exp, 0);
bd1a81f7 9702 set_mem_addr_space (temp, as);
3482bf13 9703 align = get_object_alignment (exp);
884b03c9 9704 if (modifier != EXPAND_WRITE
28a9e8c4 9705 && modifier != EXPAND_MEMORY
884b03c9 9706 && mode != BLKmode
56cf6489 9707 && align < GET_MODE_ALIGNMENT (mode)
5d9de213 9708 /* If the target does not have special handling for unaligned
9709 loads of mode then it can use regular moves for them. */
9710 && ((icode = optab_handler (movmisalign_optab, mode))
9711 != CODE_FOR_nothing))
dff12ad7 9712 {
36eabf84 9713 struct expand_operand ops[2];
a35a63ff 9714
dff12ad7 9715 /* We've already validated the memory, and we're creating a
36eabf84 9716 new pseudo destination. The predicates really can't fail,
9717 nor can the generator. */
9718 create_output_operand (&ops[0], NULL_RTX, mode);
9719 create_fixed_operand (&ops[1], temp);
9720 expand_insn (icode, 2, ops);
e9b15297 9721 temp = ops[0].value;
dff12ad7 9722 }
dff12ad7 9723 return temp;
9724 }
9725
182cf5a9 9726 case MEM_REF:
9727 {
9728 addr_space_t as
1211c450 9729 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
3754d046 9730 machine_mode address_mode;
182cf5a9 9731 tree base = TREE_OPERAND (exp, 0);
86638c2e 9732 gimple def_stmt;
36eabf84 9733 enum insn_code icode;
56cf6489 9734 unsigned align;
182cf5a9 9735 /* Handle expansion of non-aliased memory with non-BLKmode. That
9736 might end up in a register. */
a598af2a 9737 if (mem_ref_refers_to_non_mem_p (exp))
182cf5a9 9738 {
e913b5cd 9739 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
182cf5a9 9740 base = TREE_OPERAND (base, 0);
a598af2a 9741 if (offset == 0
e913b5cd 9742 && tree_fits_uhwi_p (TYPE_SIZE (type))
a598af2a 9743 && (GET_MODE_BITSIZE (DECL_MODE (base))
e913b5cd 9744 == tree_to_uhwi (TYPE_SIZE (type))))
e9b15297 9745 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
a598af2a 9746 target, tmode, modifier);
e9b15297 9747 if (TYPE_MODE (type) == BLKmode)
182cf5a9 9748 {
a598af2a 9749 temp = assign_stack_temp (DECL_MODE (base),
0ab48139 9750 GET_MODE_SIZE (DECL_MODE (base)));
a598af2a 9751 store_expr (base, temp, 0, false);
9752 temp = adjust_address (temp, BLKmode, offset);
e9b15297 9753 set_mem_size (temp, int_size_in_bytes (type));
a598af2a 9754 return temp;
182cf5a9 9755 }
e9b15297 9756 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9757 bitsize_int (offset * BITS_PER_UNIT));
9758 return expand_expr (exp, target, tmode, modifier);
182cf5a9 9759 }
9760 address_mode = targetm.addr_space.address_mode (as);
86638c2e 9761 base = TREE_OPERAND (exp, 0);
9762 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
5d9de213 9763 {
9764 tree mask = gimple_assign_rhs2 (def_stmt);
9765 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9766 gimple_assign_rhs1 (def_stmt), mask);
9767 TREE_OPERAND (exp, 0) = base;
9768 }
3482bf13 9769 align = get_object_alignment (exp);
84f7af3e 9770 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
d244cab5 9771 op0 = memory_address_addr_space (mode, op0, as);
182cf5a9 9772 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6b0e9e4b 9773 {
e913b5cd 9774 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
6b0e9e4b 9775 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
628e6f66 9776 op0 = memory_address_addr_space (mode, op0, as);
6b0e9e4b 9777 }
182cf5a9 9778 temp = gen_rtx_MEM (mode, op0);
9779 set_mem_attributes (temp, exp, 0);
9780 set_mem_addr_space (temp, as);
9781 if (TREE_THIS_VOLATILE (exp))
9782 MEM_VOLATILE_P (temp) = 1;
884b03c9 9783 if (modifier != EXPAND_WRITE
28a9e8c4 9784 && modifier != EXPAND_MEMORY
a12f023f 9785 && !inner_reference_p
884b03c9 9786 && mode != BLKmode
e2c56221 9787 && align < GET_MODE_ALIGNMENT (mode))
5d9de213 9788 {
e2c56221 9789 if ((icode = optab_handler (movmisalign_optab, mode))
9790 != CODE_FOR_nothing)
9791 {
9792 struct expand_operand ops[2];
9793
9794 /* We've already validated the memory, and we're creating a
9795 new pseudo destination. The predicates really can't fail,
9796 nor can the generator. */
9797 create_output_operand (&ops[0], NULL_RTX, mode);
9798 create_fixed_operand (&ops[1], temp);
9799 expand_insn (icode, 2, ops);
e9b15297 9800 temp = ops[0].value;
e2c56221 9801 }
9802 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9803 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9804 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
3f71db40 9805 (modifier == EXPAND_STACK_PARM
9806 ? NULL_RTX : target),
e2c56221 9807 mode, mode);
5d9de213 9808 }
182cf5a9 9809 return temp;
9810 }
9811
dff12ad7 9812 case ARRAY_REF:
9813
9814 {
9815 tree array = treeop0;
9816 tree index = treeop1;
f42d23fc 9817 tree init;
dff12ad7 9818
9819 /* Fold an expression like: "foo"[2].
9820 This is not done in fold so it won't happen inside &.
9821 Don't fold if this is for wide characters since it's too
9822 difficult to do correctly and this is a very rare case. */
9823
9824 if (modifier != EXPAND_CONST_ADDRESS
9825 && modifier != EXPAND_INITIALIZER
9826 && modifier != EXPAND_MEMORY)
9827 {
9828 tree t = fold_read_from_constant_string (exp);
9829
9830 if (t)
9831 return expand_expr (t, target, tmode, modifier);
9832 }
9833
9834 /* If this is a constant index into a constant array,
9835 just get the value from the array. Handle both the cases when
9836 we have an explicit constructor and when our operand is a variable
9837 that was declared const. */
9838
9839 if (modifier != EXPAND_CONST_ADDRESS
9840 && modifier != EXPAND_INITIALIZER
9841 && modifier != EXPAND_MEMORY
9842 && TREE_CODE (array) == CONSTRUCTOR
9843 && ! TREE_SIDE_EFFECTS (array)
9844 && TREE_CODE (index) == INTEGER_CST)
9845 {
9846 unsigned HOST_WIDE_INT ix;
9847 tree field, value;
9848
9849 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9850 field, value)
9851 if (tree_int_cst_equal (field, index))
10b58489 9852 {
dff12ad7 9853 if (!TREE_SIDE_EFFECTS (value))
9854 return expand_expr (fold (value), target, tmode, modifier);
9855 break;
10b58489 9856 }
dff12ad7 9857 }
10f307d9 9858
dff12ad7 9859 else if (optimize >= 1
9860 && modifier != EXPAND_CONST_ADDRESS
9861 && modifier != EXPAND_INITIALIZER
9862 && modifier != EXPAND_MEMORY
9863 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
df8d3e89 9864 && TREE_CODE (index) == INTEGER_CST
9865 && (TREE_CODE (array) == VAR_DECL
9866 || TREE_CODE (array) == CONST_DECL)
9867 && (init = ctor_for_folding (array)) != error_mark_node)
dff12ad7 9868 {
e8e60ada 9869 if (init == NULL_TREE)
9870 {
9871 tree value = build_zero_cst (type);
9872 if (TREE_CODE (value) == CONSTRUCTOR)
9873 {
9874 /* If VALUE is a CONSTRUCTOR, this optimization is only
9875 useful if this doesn't store the CONSTRUCTOR into
9876 memory. If it does, it is more efficient to just
9877 load the data from the array directly. */
9878 rtx ret = expand_constructor (value, target,
9879 modifier, true);
9880 if (ret == NULL_RTX)
9881 value = NULL_TREE;
9882 }
9883
9884 if (value)
9885 return expand_expr (value, target, tmode, modifier);
9886 }
9887 else if (TREE_CODE (init) == CONSTRUCTOR)
dff12ad7 9888 {
df8d3e89 9889 unsigned HOST_WIDE_INT ix;
9890 tree field, value;
68a556d6 9891
df8d3e89 9892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9893 field, value)
9894 if (tree_int_cst_equal (field, index))
9895 {
9896 if (TREE_SIDE_EFFECTS (value))
9897 break;
10f307d9 9898
df8d3e89 9899 if (TREE_CODE (value) == CONSTRUCTOR)
dff12ad7 9900 {
df8d3e89 9901 /* If VALUE is a CONSTRUCTOR, this
9902 optimization is only useful if
9903 this doesn't store the CONSTRUCTOR
9904 into memory. If it does, it is more
9905 efficient to just load the data from
9906 the array directly. */
9907 rtx ret = expand_constructor (value, target,
9908 modifier, true);
9909 if (ret == NULL_RTX)
dff12ad7 9910 break;
dff12ad7 9911 }
df8d3e89 9912
f42d23fc 9913 return
9914 expand_expr (fold (value), target, tmode, modifier);
df8d3e89 9915 }
9916 }
f42d23fc 9917 else if (TREE_CODE (init) == STRING_CST)
df8d3e89 9918 {
df8d3e89 9919 tree low_bound = array_ref_low_bound (exp);
f42d23fc 9920 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9921
9922 /* Optimize the special case of a zero lower bound.
9923
9924 We convert the lower bound to sizetype to avoid problems
9925 with constant folding. E.g. suppose the lower bound is
9926 1 and its mode is QI. Without the conversion
9927 (ARRAY + (INDEX - (unsigned char)1))
9928 becomes
9929 (ARRAY + (-(unsigned char)1) + INDEX)
9930 which becomes
9931 (ARRAY + 255 + INDEX). Oops! */
9932 if (!integer_zerop (low_bound))
df8d3e89 9933 index1 = size_diffop_loc (loc, index1,
f42d23fc 9934 fold_convert_loc (loc, sizetype,
9935 low_bound));
df8d3e89 9936
f42d23fc 9937 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
dff12ad7 9938 {
df8d3e89 9939 tree type = TREE_TYPE (TREE_TYPE (init));
3754d046 9940 machine_mode mode = TYPE_MODE (type);
3a54beaf 9941
df8d3e89 9942 if (GET_MODE_CLASS (mode) == MODE_INT
9943 && GET_MODE_SIZE (mode) == 1)
9944 return gen_int_mode (TREE_STRING_POINTER (init)
f9ae6f95 9945 [TREE_INT_CST_LOW (index1)],
df8d3e89 9946 mode);
dff12ad7 9947 }
9948 }
9949 }
9950 }
9951 goto normal_inner_ref;
9952
9953 case COMPONENT_REF:
9954 /* If the operand is a CONSTRUCTOR, we can just extract the
9955 appropriate field if it is present. */
9956 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9957 {
9958 unsigned HOST_WIDE_INT idx;
9959 tree field, value;
9960
9961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9962 idx, field, value)
9963 if (field == treeop1
9964 /* We can normally use the value of the field in the
9965 CONSTRUCTOR. However, if this is a bitfield in
9966 an integral mode that we can fit in a HOST_WIDE_INT,
9967 we must mask only the number of bits in the bitfield,
9968 since this is done implicitly by the constructor. If
9969 the bitfield does not meet either of those conditions,
9970 we can't do this optimization. */
9971 && (! DECL_BIT_FIELD (field)
9972 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
995b44f5 9973 && (GET_MODE_PRECISION (DECL_MODE (field))
dff12ad7 9974 <= HOST_BITS_PER_WIDE_INT))))
9975 {
9976 if (DECL_BIT_FIELD (field)
9977 && modifier == EXPAND_STACK_PARM)
9978 target = 0;
9979 op0 = expand_expr (value, target, tmode, modifier);
9980 if (DECL_BIT_FIELD (field))
9981 {
f9ae6f95 9982 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3754d046 9983 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
dff12ad7 9984
9985 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9986 {
0359f9f5 9987 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9988 imode);
dff12ad7 9989 op0 = expand_and (imode, op0, op1, target);
9990 }
9991 else
9992 {
995b44f5 9993 int count = GET_MODE_PRECISION (imode) - bitsize;
dff12ad7 9994
9995 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9996 target, 0);
9997 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9998 target, 0);
9999 }
10000 }
10001
10002 return op0;
10003 }
10004 }
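      /* A concrete instance of the masking above: given
	   struct { unsigned f : 3; } x = { 5 };
	 reading x.f out of the CONSTRUCTOR ANDs the value with
	 (1 << 3) - 1 == 7.  Were the field signed, the shift pair
	 above would instead sign-extend bit 2 through the word.  */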
10005 goto normal_inner_ref;
10006
10007 case BIT_FIELD_REF:
10008 case ARRAY_RANGE_REF:
10009 normal_inner_ref:
10010 {
3754d046 10011 machine_mode mode1, mode2;
dff12ad7 10012 HOST_WIDE_INT bitsize, bitpos;
10013 tree offset;
10014 int volatilep = 0, must_force_mem;
10015 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10016 &mode1, &unsignedp, &volatilep, true);
10017 rtx orig_op0, memloc;
db219949 10018 bool clear_mem_expr = false;
dff12ad7 10019
10020 /* If we got back the original object, something is wrong. Perhaps
10021 we are evaluating an expression too early. In any event, don't
10022 infinitely recurse. */
10023 gcc_assert (tem != exp);
10024
10025 /* If TEM's type is a union of variable size, pass TARGET to the inner
10026 computation, since it will need a temporary and TARGET is known
 10027	 to suffice.  This occurs in unchecked conversion in Ada.  */
10028 orig_op0 = op0
a12f023f 10029 = expand_expr_real (tem,
10030 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10031 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10032 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10033 != INTEGER_CST)
10034 && modifier != EXPAND_STACK_PARM
10035 ? target : NULL_RTX),
10036 VOIDmode,
10037 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10038 NULL, true);
a420d927 10039
61a1f9de 10040 /* If the field has a mode, we want to access it in the
1795103a 10041 field's mode, not the computed mode.
10042 If a MEM has VOIDmode (external with incomplete type),
10043 use BLKmode for it instead. */
10044 if (MEM_P (op0))
10045 {
61a1f9de 10046 if (mode1 != VOIDmode)
1795103a 10047 op0 = adjust_address (op0, mode1, 0);
10048 else if (GET_MODE (op0) == VOIDmode)
10049 op0 = adjust_address (op0, BLKmode, 0);
10050 }
a420d927 10051
dff12ad7 10052 mode2
10053 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10054
10055 /* If we have either an offset, a BLKmode result, or a reference
10056 outside the underlying object, we must force it to memory.
10057 Such a case can occur in Ada if we have unchecked conversion
10058 of an expression from a scalar type to an aggregate type or
10059 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10060 passed a partially uninitialized object or a view-conversion
10061 to a larger size. */
10062 must_force_mem = (offset
10063 || mode1 == BLKmode
10064 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10065
10066 /* Handle CONCAT first. */
10067 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10068 {
10069 if (bitpos == 0
10070 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10071 return op0;
10072 if (bitpos == 0
10073 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10074 && bitsize)
10075 {
10076 op0 = XEXP (op0, 0);
10077 mode2 = GET_MODE (op0);
10078 }
10079 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10080 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10081 && bitpos
10082 && bitsize)
10083 {
10084 op0 = XEXP (op0, 1);
10085 bitpos = 0;
10086 mode2 = GET_MODE (op0);
10087 }
10088 else
10089 /* Otherwise force into memory. */
10090 must_force_mem = 1;
10091 }
10092
10093 /* If this is a constant, put it in a register if it is a legitimate
10094 constant and we don't need a memory reference. */
10095 if (CONSTANT_P (op0)
10096 && mode2 != BLKmode
ca316360 10097 && targetm.legitimate_constant_p (mode2, op0)
dff12ad7 10098 && !must_force_mem)
10099 op0 = force_reg (mode2, op0);
10100
10101 /* Otherwise, if this is a constant, try to force it to the constant
10102 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10103 is a legitimate constant. */
10104 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10105 op0 = validize_mem (memloc);
10106
10107 /* Otherwise, if this is a constant or the object is not in memory
 10108	 and needs to be, put it there.  */
10109 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10110 {
9f495e8d 10111 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
dff12ad7 10112 emit_move_insn (memloc, op0);
10113 op0 = memloc;
db219949 10114 clear_mem_expr = true;
dff12ad7 10115 }
10116
10117 if (offset)
10118 {
3754d046 10119 machine_mode address_mode;
dff12ad7 10120 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10121 EXPAND_SUM);
10122
10123 gcc_assert (MEM_P (op0));
10124
87cf5753 10125 address_mode = get_address_mode (op0);
98155838 10126 if (GET_MODE (offset_rtx) != address_mode)
2ff88218 10127 {
10128 /* We cannot be sure that the RTL in offset_rtx is valid outside
10129 of a memory address context, so force it into a register
10130 before attempting to convert it to the desired mode. */
10131 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10132 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10133 }
dff12ad7 10134
dbe2840a 10135 /* See the comment in expand_assignment for the rationale. */
10136 if (mode1 != VOIDmode
10137 && bitpos != 0
10138 && bitsize > 0
dff12ad7 10139 && (bitpos % bitsize) == 0
10140 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
dbe2840a 10141 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
dff12ad7 10142 {
10143 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10144 bitpos = 0;
10145 }
10146
10147 op0 = offset_address (op0, offset_rtx,
10148 highest_pow2_factor (offset));
10149 }
10150
10151 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10152 record its alignment as BIGGEST_ALIGNMENT. */
10153 if (MEM_P (op0) && bitpos == 0 && offset != 0
10154 && is_aligning_offset (offset, tem))
10155 set_mem_align (op0, BIGGEST_ALIGNMENT);
10156
10157 /* Don't forget about volatility even if this is a bitfield. */
10158 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10159 {
10160 if (op0 == orig_op0)
10161 op0 = copy_rtx (op0);
10162
10163 MEM_VOLATILE_P (op0) = 1;
10164 }
10165
10166 /* In cases where an aligned union has an unaligned object
10167 as a field, we might be extracting a BLKmode value from
10168 an integer-mode (e.g., SImode) object. Handle this case
10169 by doing the extract into an object as wide as the field
10170 (which we know to be the width of a basic mode), then
10171 storing into memory, and changing the mode to BLKmode. */
10172 if (mode1 == VOIDmode
10173 || REG_P (op0) || GET_CODE (op0) == SUBREG
10174 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10175 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10176 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10177 && modifier != EXPAND_CONST_ADDRESS
7b642468 10178 && modifier != EXPAND_INITIALIZER
10179 && modifier != EXPAND_MEMORY)
61a1f9de 10180 /* If the bitfield is volatile and the bitsize
10181 is narrower than the access size of the bitfield,
10182 we need to extract bitfields from the access. */
10183 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10184 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10185 && mode1 != BLKmode
10186 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
dff12ad7 10187 /* If the field isn't aligned enough to fetch as a memref,
10188 fetch it as a bit field. */
10189 || (mode1 != BLKmode
10190 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10191 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10192 || (MEM_P (op0)
10193 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10194 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
780871fb 10195 && modifier != EXPAND_MEMORY
dff12ad7 10196 && ((modifier == EXPAND_CONST_ADDRESS
10197 || modifier == EXPAND_INITIALIZER)
10198 ? STRICT_ALIGNMENT
10199 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10200 || (bitpos % BITS_PER_UNIT != 0)))
10201 /* If the type and the field are a constant size and the
10202 size of the type isn't the same size as the bitfield,
10203 we must use bitfield operations. */
10204 || (bitsize >= 0
10205 && TYPE_SIZE (TREE_TYPE (exp))
10206 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10208 bitsize)))
10209 {
3754d046 10210 machine_mode ext_mode = mode;
dff12ad7 10211
10212 if (ext_mode == BLKmode
10213 && ! (target != 0 && MEM_P (op0)
10214 && MEM_P (target)
10215 && bitpos % BITS_PER_UNIT == 0))
10216 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10217
10218 if (ext_mode == BLKmode)
10219 {
10220 if (target == 0)
0ab48139 10221 target = assign_temp (type, 1, 1);
dff12ad7 10222
4a5cda13 10223 /* ??? Unlike the similar test a few lines below, this one is
10224 very likely obsolete. */
dff12ad7 10225 if (bitsize == 0)
10226 return target;
10227
10228 /* In this case, BITPOS must start at a byte boundary and
10229 TARGET, if specified, must be a MEM. */
10230 gcc_assert (MEM_P (op0)
10231 && (!target || MEM_P (target))
10232 && !(bitpos % BITS_PER_UNIT));
10233
10234 emit_block_move (target,
10235 adjust_address (op0, VOIDmode,
10236 bitpos / BITS_PER_UNIT),
10237 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10238 / BITS_PER_UNIT),
10239 (modifier == EXPAND_STACK_PARM
10240 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10241
10242 return target;
10243 }
10244
4a5cda13 10245 /* If we have nothing to extract, the result will be 0 for targets
10246 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10247 return 0 for the sake of consistency, as reading a zero-sized
10248 bitfield is valid in Ada and the value is fully specified. */
10249 if (bitsize == 0)
10250 return const0_rtx;
10251
dff12ad7 10252 op0 = validize_mem (op0);
10253
10254 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10255 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10256
3f71db40 10257 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
dff12ad7 10258 (modifier == EXPAND_STACK_PARM
10259 ? NULL_RTX : target),
10260 ext_mode, ext_mode);
10261
10262 /* If the result is a record type and BITSIZE is narrower than
10263 the mode of OP0, an integral mode, and this is a big endian
10264 machine, we must put the field into the high-order bits. */
10265 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10266 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10267 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10268 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
f5ff0b21 10269 GET_MODE_BITSIZE (GET_MODE (op0))
10270 - bitsize, op0, 1);
dff12ad7 10271
10272 /* If the result type is BLKmode, store the data into a temporary
10273 of the appropriate type, but with the mode corresponding to the
217d5117 10274 mode for the data we have (op0's mode). */
dff12ad7 10275 if (mode == BLKmode)
10276 {
217d5117 10277 rtx new_rtx
10278 = assign_stack_temp_for_type (ext_mode,
10279 GET_MODE_BITSIZE (ext_mode),
10280 type);
dff12ad7 10281 emit_move_insn (new_rtx, op0);
10282 op0 = copy_rtx (new_rtx);
10283 PUT_MODE (op0, BLKmode);
dff12ad7 10284 }
5db186f1 10285
dff12ad7 10286 return op0;
10287 }
10f307d9 10288
dff12ad7 10289 /* If the result is BLKmode, use that to access the object
10290 now as well. */
10291 if (mode == BLKmode)
10292 mode1 = BLKmode;
10f307d9 10293
dff12ad7 10294 /* Get a reference to just this component. */
10295 if (modifier == EXPAND_CONST_ADDRESS
10296 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10297 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10298 else
10299 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10f307d9 10300
dff12ad7 10301 if (op0 == orig_op0)
10302 op0 = copy_rtx (op0);
10f307d9 10303
db219949 10304 set_mem_attributes (op0, exp, 0);
75d525a2 10305
dff12ad7 10306 if (REG_P (XEXP (op0, 0)))
10307 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
fa56dc1d 10308
db219949 10309	 /* If op0 is a temporary because the original expression was forced
10310 to memory, clear MEM_EXPR so that the original expression cannot
10311 be marked as addressable through MEM_EXPR of the temporary. */
10312 if (clear_mem_expr)
10313 set_mem_expr (op0, NULL_TREE);
10314
dff12ad7 10315 MEM_VOLATILE_P (op0) |= volatilep;
10316 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10317 || modifier == EXPAND_CONST_ADDRESS
10318 || modifier == EXPAND_INITIALIZER)
10319 return op0;
f42d23fc 10320
e9b15297 10321 if (target == 0)
dff12ad7 10322 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
446a42ee 10323
dff12ad7 10324 convert_move (target, op0, unsignedp);
10325 return target;
10326 }
a54ebf2e 10327
dff12ad7 10328 case OBJ_TYPE_REF:
10329 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
93d9d66f 10330
dff12ad7 10331 case CALL_EXPR:
10332 /* All valid uses of __builtin_va_arg_pack () are removed during
10333 inlining. */
10334 if (CALL_EXPR_VA_ARG_PACK (exp))
10335 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
b533d30b 10336 {
dff12ad7 10337 tree fndecl = get_callee_fndecl (exp), attr;
ca436b82 10338
dff12ad7 10339 if (fndecl
10340 && (attr = lookup_attribute ("error",
10341 DECL_ATTRIBUTES (fndecl))) != NULL)
10342 error ("%Kcall to %qs declared with attribute error: %s",
10343 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10344 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10345 if (fndecl
10346 && (attr = lookup_attribute ("warning",
10347 DECL_ATTRIBUTES (fndecl))) != NULL)
10348 warning_at (tree_nonartificial_location (exp),
10349 0, "%Kcall to %qs declared with attribute warning: %s",
10350 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10351 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
ca436b82 10352
dff12ad7 10353 /* Check for a built-in function. */
10354 if (fndecl && DECL_BUILT_IN (fndecl))
b533d30b 10355 {
dff12ad7 10356 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
f21337ef 10357 if (CALL_WITH_BOUNDS_P (exp))
10358 return expand_builtin_with_bounds (exp, target, subtarget,
10359 tmode, ignore);
10360 else
10361 return expand_builtin (exp, target, subtarget, tmode, ignore);
b533d30b 10362 }
dff12ad7 10363 }
10364 return expand_call (exp, target, ignore);
ca436b82 10365
dff12ad7 10366 case VIEW_CONVERT_EXPR:
10367 op0 = NULL_RTX;
ca436b82 10368
dff12ad7 10369 /* If we are converting to BLKmode, try to avoid an intermediate
10370 temporary by fetching an inner memory reference. */
10371 if (mode == BLKmode
e9b15297 10372 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
dff12ad7 10373 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10374 && handled_component_p (treeop0))
10375 {
3754d046 10376 machine_mode mode1;
dff12ad7 10377 HOST_WIDE_INT bitsize, bitpos;
10378 tree offset;
10379 int unsignedp;
10380 int volatilep = 0;
10381 tree tem
10382 = get_inner_reference (treeop0, &bitsize, &bitpos,
10383 &offset, &mode1, &unsignedp, &volatilep,
10384 true);
10385 rtx orig_op0;
ca436b82 10386
dff12ad7 10387 /* ??? We should work harder and deal with non-zero offsets. */
10388 if (!offset
10389 && (bitpos % BITS_PER_UNIT) == 0
10390 && bitsize >= 0
e9b15297 10391 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
dff12ad7 10392 {
10393 /* See the normal_inner_ref case for the rationale. */
10394 orig_op0
a12f023f 10395 = expand_expr_real (tem,
10396 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10397 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10398 != INTEGER_CST)
10399 && modifier != EXPAND_STACK_PARM
10400 ? target : NULL_RTX),
10401 VOIDmode,
10402 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10403 NULL, true);
ca436b82 10404
dff12ad7 10405 if (MEM_P (orig_op0))
b533d30b 10406 {
dff12ad7 10407 op0 = orig_op0;
b533d30b 10408
dff12ad7 10409 /* Get a reference to just this component. */
10410 if (modifier == EXPAND_CONST_ADDRESS
10411 || modifier == EXPAND_SUM
10412 || modifier == EXPAND_INITIALIZER)
10413 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10414 else
10415 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
d2ae1b1e 10416
dff12ad7 10417 if (op0 == orig_op0)
10418 op0 = copy_rtx (op0);
10419
10420 set_mem_attributes (op0, treeop0, 0);
10421 if (REG_P (XEXP (op0, 0)))
10422 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10423
10424 MEM_VOLATILE_P (op0) |= volatilep;
10425 }
10426 }
b533d30b 10427 }
10f307d9 10428
dff12ad7 10429 if (!op0)
a12f023f 10430 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10431 NULL, inner_reference_p);
dff12ad7 10432
10433 /* If the input and output modes are both the same, we are done. */
10434 if (mode == GET_MODE (op0))
10435 ;
10436 /* If neither mode is BLKmode, and both modes are the same size
10437 then we can use gen_lowpart. */
10438 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
995b44f5 10439 && (GET_MODE_PRECISION (mode)
10440 == GET_MODE_PRECISION (GET_MODE (op0)))
dff12ad7 10441 && !COMPLEX_MODE_P (GET_MODE (op0)))
10442 {
10443 if (GET_CODE (op0) == SUBREG)
10444 op0 = force_reg (GET_MODE (op0), op0);
062fb763 10445 temp = gen_lowpart_common (mode, op0);
10446 if (temp)
10447 op0 = temp;
10448 else
10449 {
10450 if (!REG_P (op0) && !MEM_P (op0))
10451 op0 = force_reg (GET_MODE (op0), op0);
10452 op0 = gen_lowpart (mode, op0);
10453 }
dff12ad7 10454 }
f235634e 10455 /* If both types are integral, convert from one mode to the other. */
10456 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
48e1416a 10457 op0 = convert_modes (mode, GET_MODE (op0), op0,
dff12ad7 10458 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
fc0bb78b 10459 /* If the output type is a bit-field type, do an extraction. */
10460 else if (reduce_bit_field)
10461 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10462 TYPE_UNSIGNED (type), NULL_RTX,
10463 mode, mode);
dff12ad7 10464 /* As a last resort, spill op0 to memory, and reload it in a
10465 different mode. */
10466 else if (!MEM_P (op0))
10467 {
10468 /* If the operand is not a MEM, force it into memory. Since we
10469 are going to be changing the mode of the MEM, don't call
10470 force_const_mem for constants because we don't allow pool
10471 constants to change mode. */
10472 tree inner_type = TREE_TYPE (treeop0);
10473
10474 gcc_assert (!TREE_ADDRESSABLE (exp));
10f307d9 10475
dff12ad7 10476 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10477 target
10478 = assign_stack_temp_for_type
10479 (TYPE_MODE (inner_type),
0ab48139 10480 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
d2ae1b1e 10481
dff12ad7 10482 emit_move_insn (target, op0);
10483 op0 = target;
10484 }
d2ae1b1e 10485
fc0bb78b 10486 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10487 output type is such that the operand is known to be aligned, indicate
10488 that it is. Otherwise, we need only be concerned about alignment for
10489 non-BLKmode results. */
dff12ad7 10490 if (MEM_P (op0))
10491 {
7f295214 10492 enum insn_code icode;
10493
dff12ad7 10494 if (TYPE_ALIGN_OK (type))
d3909c67 10495 {
10496 /* ??? Copying the MEM without substantially changing it might
10497 run afoul of the code handling volatile memory references in
10498 store_expr, which assumes that TARGET is returned unmodified
10499 if it has been used. */
10500 op0 = copy_rtx (op0);
10501 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10502 }
a12f023f 10503 else if (modifier != EXPAND_WRITE
10504 && modifier != EXPAND_MEMORY
10505 && !inner_reference_p
dff12ad7 10506 && mode != BLKmode
10507 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10508 {
a12f023f 10509 /* If the target does have special handling for unaligned
10510 loads of mode then use them. */
10511 if ((icode = optab_handler (movmisalign_optab, mode))
10512 != CODE_FOR_nothing)
10513 {
9ed997be 10514 rtx reg;
a12f023f 10515
10516 op0 = adjust_address (op0, mode, 0);
10517 /* We've already validated the memory, and we're creating a
10518 new pseudo destination. The predicates really can't
10519 fail. */
10520 reg = gen_reg_rtx (mode);
10521
10522 /* Nor can the insn generator. */
9ed997be 10523 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
a12f023f 10524 emit_insn (insn);
10525 return reg;
10526 }
10527 else if (STRICT_ALIGNMENT)
10528 {
10529 tree inner_type = TREE_TYPE (treeop0);
10530 HOST_WIDE_INT temp_size
10531 = MAX (int_size_in_bytes (inner_type),
10532 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10533 rtx new_rtx
10534 = assign_stack_temp_for_type (mode, temp_size, type);
10535 rtx new_with_op0_mode
10536 = adjust_address (new_rtx, GET_MODE (op0), 0);
10537
10538 gcc_assert (!TREE_ADDRESSABLE (exp));
10539
10540 if (GET_MODE (op0) == BLKmode)
10541 emit_block_move (new_with_op0_mode, op0,
10542 GEN_INT (GET_MODE_SIZE (mode)),
10543 (modifier == EXPAND_STACK_PARM
10544 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10545 else
10546 emit_move_insn (new_with_op0_mode, op0);
6295ca72 10547
a12f023f 10548 op0 = new_rtx;
10549 }
dff12ad7 10550 }
68a556d6 10551
dff12ad7 10552 op0 = adjust_address (op0, mode, 0);
10553 }
10f307d9 10554
dff12ad7 10555 return op0;
4ee9c684 10556
41076ef6 10557 case MODIFY_EXPR:
10558 {
588e1cc3 10559 tree lhs = treeop0;
10560 tree rhs = treeop1;
987329ad 10561 gcc_assert (ignore);
10562
10f307d9 10563 /* Check for |= or &= of a bitfield of size one into another bitfield
10564 of size 1. In this case, (unless we need the result of the
10565 assignment) we can do this more efficiently with a
10566 test followed by an assignment, if necessary.
10567
10568 ??? At this point, we can't get a BIT_FIELD_REF here. But if
 10569	 things change so that we do, this code should be enhanced to
10570 support it. */
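	/* For example, given
	     struct { unsigned a : 1, b : 1; } s;
	     s.a |= s.b;
	   the code below tests s.b, jumps around the store when it is
	   zero, and otherwise stores the constant 1, avoiding a
	   read-modify-write of the destination bitfield.  */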
987329ad 10571 if (TREE_CODE (lhs) == COMPONENT_REF
10f307d9 10572 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10573 || TREE_CODE (rhs) == BIT_AND_EXPR)
10574 && TREE_OPERAND (rhs, 0) == lhs
10575 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
a0c2c45b 10576 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10577 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10f307d9 10578 {
1d277a67 10579 rtx_code_label *label = gen_label_rtx ();
e3b560a6 10580 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10f307d9 10581 do_jump (TREE_OPERAND (rhs, 1),
e3b560a6 10582 value ? label : 0,
79ab74cc 10583 value ? 0 : label, -1);
5b5037b3 10584 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
ca06a84c 10585 false);
01ab6370 10586 do_pending_stack_adjust ();
10f307d9 10587 emit_label (label);
10588 return const0_rtx;
10589 }
10590
ca06a84c 10591 expand_assignment (lhs, rhs, false);
993e4bab 10592 return const0_rtx;
10f307d9 10593 }
10594
10f307d9 10595 case ADDR_EXPR:
b51e4016 10596 return expand_expr_addr_expr (exp, target, tmode, modifier);
10f307d9 10597
b63679d2 10598 case REALPART_EXPR:
588e1cc3 10599 op0 = expand_normal (treeop0);
de17a47b 10600 return read_complex_part (op0, false);
fa56dc1d 10601
b63679d2 10602 case IMAGPART_EXPR:
588e1cc3 10603 op0 = expand_normal (treeop0);
de17a47b 10604 return read_complex_part (op0, true);
b63679d2 10605
16c9337c 10606 case RETURN_EXPR:
10607 case LABEL_EXPR:
10608 case GOTO_EXPR:
10609 case SWITCH_EXPR:
10610 case ASM_EXPR:
16c9337c 10611 /* Expanded in cfgexpand.c. */
10612 gcc_unreachable ();
4ee9c684 10613
694ec519 10614 case TRY_CATCH_EXPR:
4ee9c684 10615 case CATCH_EXPR:
4ee9c684 10616 case EH_FILTER_EXPR:
f0c211a3 10617 case TRY_FINALLY_EXPR:
6388f9f7 10618 /* Lowered by tree-eh.c. */
611234b4 10619 gcc_unreachable ();
f0c211a3 10620
6388f9f7 10621 case WITH_CLEANUP_EXPR:
10622 case CLEANUP_POINT_EXPR:
10623 case TARGET_EXPR:
873f1e89 10624 case CASE_LABEL_EXPR:
2799a2b7 10625 case VA_ARG_EXPR:
491e04ef 10626 case BIND_EXPR:
e3ee6a3e 10627 case INIT_EXPR:
10628 case CONJ_EXPR:
10629 case COMPOUND_EXPR:
10630 case PREINCREMENT_EXPR:
10631 case PREDECREMENT_EXPR:
10632 case POSTINCREMENT_EXPR:
10633 case POSTDECREMENT_EXPR:
10634 case LOOP_EXPR:
10635 case EXIT_EXPR:
7843e4bc 10636 case COMPOUND_LITERAL_EXPR:
6388f9f7 10637 /* Lowered by gimplify.c. */
611234b4 10638 gcc_unreachable ();
f0c211a3 10639
6bfa2cc1 10640 case FDESC_EXPR:
10641 /* Function descriptors are not valid except for as
10642 initialization constants, and should not be expanded. */
611234b4 10643 gcc_unreachable ();
6bfa2cc1 10644
80f06481 10645 case WITH_SIZE_EXPR:
10646 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10647 have pulled out the size to use in whatever context it needed. */
588e1cc3 10648 return expand_expr_real (treeop0, original_target, tmode,
a12f023f 10649 modifier, alt_rtl, inner_reference_p);
80f06481 10650
10f307d9 10651 default:
dff12ad7 10652 return expand_expr_real_2 (&ops, target, tmode, modifier);
10f307d9 10653 }
4f7f7efd 10654}
4f7f7efd 10655\f
10656/* Subroutine of above: reduce EXP to the precision of TYPE (in the
10657 signedness of TYPE), possibly returning the result in TARGET. */
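/* A worked example: for a signed 3-bit type held in SImode, PREC is 3,
   so a non-constant value is shifted left by 29 and arithmetically back
   right by 29, sign-extending bit 2 through the register; for an
   unsigned type the value is instead ANDed with the mask 7.  */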
10658static rtx
10659reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10660{
10661 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10662 if (target && GET_MODE (target) != GET_MODE (exp))
10663 target = 0;
32e79ae6 10664 /* For constant values, reduce using build_int_cst_type. */
971ba038 10665 if (CONST_INT_P (exp))
32e79ae6 10666 {
10667 HOST_WIDE_INT value = INTVAL (exp);
10668 tree t = build_int_cst_type (type, value);
10669 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10670 }
10671 else if (TYPE_UNSIGNED (type))
4f7f7efd 10672 {
3754d046 10673 machine_mode mode = GET_MODE (exp);
ddb1be65 10674 rtx mask = immed_wide_int_const
796b6678 10675 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
e913b5cd 10676 return expand_and (mode, exp, mask, target);
4f7f7efd 10677 }
10678 else
10679 {
995b44f5 10680 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
f5ff0b21 10681 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10682 exp, count, target, 0);
10683 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10684 exp, count, target, 0);
4f7f7efd 10685 }
10f307d9 10686}
b54842d8 10687\f
67c68e45 10688/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10689 when applied to the address of EXP produces an address known to be
10690 aligned more than BIGGEST_ALIGNMENT. */
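/* Such offsets typically come from the address-rounding idiom
   (-addr) & (align - 1): in tree form, a BIT_AND_EXPR whose second
   operand is ALIGN - 1 (one less than a power of two larger than
   BIGGEST_ALIGNMENT) and whose first operand is a NEGATE_EXPR of the
   address of EXP.  */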
10691
10692static int
1f1872fd 10693is_aligning_offset (const_tree offset, const_tree exp)
67c68e45 10694{
55f9d7dc 10695 /* Strip off any conversions. */
72dd6141 10696 while (CONVERT_EXPR_P (offset))
67c68e45 10697 offset = TREE_OPERAND (offset, 0);
10698
10699 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10700 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10701 if (TREE_CODE (offset) != BIT_AND_EXPR
e913b5cd 10702 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
491e04ef 10703 || compare_tree_int (TREE_OPERAND (offset, 1),
6be0ba7c 10704 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
ac8c312d 10705 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
67c68e45 10706 return 0;
10707
10708 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10709 It must be NEGATE_EXPR. Then strip any more conversions. */
10710 offset = TREE_OPERAND (offset, 0);
72dd6141 10711 while (CONVERT_EXPR_P (offset))
67c68e45 10712 offset = TREE_OPERAND (offset, 0);
10713
10714 if (TREE_CODE (offset) != NEGATE_EXPR)
10715 return 0;
10716
10717 offset = TREE_OPERAND (offset, 0);
72dd6141 10718 while (CONVERT_EXPR_P (offset))
67c68e45 10719 offset = TREE_OPERAND (offset, 0);
10720
55f9d7dc 10721 /* This must now be the address of EXP. */
10722 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
67c68e45 10723}
10724\f
dafdd1c8 10725/* Return the tree node if an ARG corresponds to a string constant or zero
6ef828f9 10726 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
902de8ed 10727 in bytes within the string that ARG is accessing. The type of the
10728 offset will be `sizetype'. */
b54842d8 10729
53800dbe 10730tree
35cb5232 10731string_constant (tree arg, tree *ptr_offset)
b54842d8 10732{
de556b32 10733 tree array, offset, lower_bound;
b54842d8 10734 STRIP_NOPS (arg);
10735
d2165e90 10736 if (TREE_CODE (arg) == ADDR_EXPR)
b54842d8 10737 {
d2165e90 10738 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10739 {
10740 *ptr_offset = size_zero_node;
10741 return TREE_OPERAND (arg, 0);
10742 }
10743 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10744 {
10745 array = TREE_OPERAND (arg, 0);
10746 offset = size_zero_node;
10747 }
10748 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10749 {
10750 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10751 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10752 if (TREE_CODE (array) != STRING_CST
10753 && TREE_CODE (array) != VAR_DECL)
10754 return 0;
de556b32 10755
f2b32076 10756 /* Check if the array has a nonzero lower bound. */
de556b32 10757 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10758 if (!integer_zerop (lower_bound))
10759 {
10760 /* If the offset and base aren't both constants, return 0. */
10761 if (TREE_CODE (lower_bound) != INTEGER_CST)
10762 return 0;
10763 if (TREE_CODE (offset) != INTEGER_CST)
10764 return 0;
10765 /* Adjust offset by the lower bound. */
1f8b6002 10766 offset = size_diffop (fold_convert (sizetype, offset),
de556b32 10767 fold_convert (sizetype, lower_bound));
10768 }
d2165e90 10769 }
f32d300e 10770 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10771 {
10772 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10773 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10774 if (TREE_CODE (array) != ADDR_EXPR)
10775 return 0;
10776 array = TREE_OPERAND (array, 0);
10777 if (TREE_CODE (array) != STRING_CST
10778 && TREE_CODE (array) != VAR_DECL)
10779 return 0;
10780 }
d2165e90 10781 else
10782 return 0;
4ee9c684 10783 }
0de36bdb 10784 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
b54842d8 10785 {
10786 tree arg0 = TREE_OPERAND (arg, 0);
10787 tree arg1 = TREE_OPERAND (arg, 1);
10788
10789 STRIP_NOPS (arg0);
10790 STRIP_NOPS (arg1);
10791
10792 if (TREE_CODE (arg0) == ADDR_EXPR
d2165e90 10793 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10794 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10f307d9 10795 {
d2165e90 10796 array = TREE_OPERAND (arg0, 0);
10797 offset = arg1;
10f307d9 10798 }
b54842d8 10799 else if (TREE_CODE (arg1) == ADDR_EXPR
d2165e90 10800 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10801 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10f307d9 10802 {
d2165e90 10803 array = TREE_OPERAND (arg1, 0);
10804 offset = arg0;
10f307d9 10805 }
d2165e90 10806 else
10807 return 0;
10808 }
10809 else
10810 return 0;
10811
10812 if (TREE_CODE (array) == STRING_CST)
10813 {
e3b560a6 10814 *ptr_offset = fold_convert (sizetype, offset);
d2165e90 10815 return array;
10816 }
93b79643 10817 else if (TREE_CODE (array) == VAR_DECL
10818 || TREE_CODE (array) == CONST_DECL)
d2165e90 10819 {
10820 int length;
df8d3e89 10821 tree init = ctor_for_folding (array);
d2165e90 10822
10823 /* Variables initialized to string literals can be handled too. */
df8d3e89 10824 if (init == error_mark_node
10825 || !init
10826 || TREE_CODE (init) != STRING_CST)
d2165e90 10827 return 0;
10828
d2165e90 10829 /* Avoid const char foo[4] = "abcde"; */
10830 if (DECL_SIZE_UNIT (array) == NULL_TREE
10831 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
df8d3e89 10832 || (length = TREE_STRING_LENGTH (init)) <= 0
d2165e90 10833 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10834 return 0;
10835
10836 /* If variable is bigger than the string literal, OFFSET must be constant
10837 and inside of the bounds of the string literal. */
e3b560a6 10838 offset = fold_convert (sizetype, offset);
d2165e90 10839 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
e913b5cd 10840 && (! tree_fits_uhwi_p (offset)
d2165e90 10841 || compare_tree_int (offset, length) >= 0))
10842 return 0;
10843
10844 *ptr_offset = offset;
df8d3e89 10845 return init;
b54842d8 10846 }
649d8da6 10847
b54842d8 10848 return 0;
10849}
649d8da6 10850\f
2c02962c 10851/* Generate code to calculate OPS, and exploded expression
10852 using a store-flag instruction and return an rtx for the result.
10853 OPS reflects a comparison.
649d8da6 10854
b54842d8 10855 If TARGET is nonzero, store the result there if convenient.
649d8da6 10856
b54842d8 10857 Return zero if there is no suitable set-flag instruction
10858 available on this machine.
649d8da6 10859
b54842d8 10860 Once expand_expr has been called on the arguments of the comparison,
10861 we are committed to doing the store flag, since it is not safe to
10862 re-evaluate the expression. We emit the store-flag insn by calling
10863 emit_store_flag, but only expand the arguments if we have a reason
10864 to believe that emit_store_flag will be successful. If we think that
10865 it will, but it isn't, we have to simulate the store-flag with a
10866 set/jump/set sequence. */
649d8da6 10867
b54842d8 10868static rtx
3754d046 10869do_store_flag (sepops ops, rtx target, machine_mode mode)
b54842d8 10870{
10871 enum rtx_code code;
10872 tree arg0, arg1, type;
3754d046 10873 machine_mode operand_mode;
b54842d8 10874 int unsignedp;
10875 rtx op0, op1;
b54842d8 10876 rtx subtarget = target;
2c02962c 10877 location_t loc = ops->location;
649d8da6 10878
2c02962c 10879 arg0 = ops->op0;
10880 arg1 = ops->op1;
fc80e4dd 10881
10882 /* Don't crash if the comparison was erroneous. */
10883 if (arg0 == error_mark_node || arg1 == error_mark_node)
10884 return const0_rtx;
10885
b54842d8 10886 type = TREE_TYPE (arg0);
10887 operand_mode = TYPE_MODE (type);
78a8ed03 10888 unsignedp = TYPE_UNSIGNED (type);
649d8da6 10889
b54842d8 10890 /* We won't bother with BLKmode store-flag operations because it would mean
10891 passing a lot of information to emit_store_flag. */
10892 if (operand_mode == BLKmode)
10893 return 0;
649d8da6 10894
b54842d8 10895 /* We won't bother with store-flag operations involving function pointers
10896 when function pointers must be canonicalized before comparisons. */
a84cae90 10897 if (targetm.have_canonicalize_funcptr_for_compare ()
2c02962c 10898 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10899 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
b54842d8 10900 == FUNCTION_TYPE))
2c02962c 10901 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10902 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
b54842d8 10903 == FUNCTION_TYPE))))
10904 return 0;
649d8da6 10905
b54842d8 10906 STRIP_NOPS (arg0);
10907 STRIP_NOPS (arg1);
6cf89e04 10908
d7ad16c2 10909 /* For vector typed comparisons emit code to generate the desired
10910 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10911 expander for this. */
10912 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10913 {
10914 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10915 tree if_true = constant_boolean_node (true, ops->type);
10916 tree if_false = constant_boolean_node (false, ops->type);
10917 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10918 }
10919
b54842d8 10920 /* Get the rtx comparison code to use. We know that EXP is a comparison
10921 operation of some type. Some comparisons against 1 and -1 can be
10922 converted to comparisons with zero. Do so here so that the tests
10923 below will be aware that we have a comparison with zero. These
10924 tests will not catch constants in the first operand, but constants
10925 are rarely passed as the first operand. */
649d8da6 10926
2c02962c 10927 switch (ops->code)
b54842d8 10928 {
10929 case EQ_EXPR:
10930 code = EQ;
10f307d9 10931 break;
b54842d8 10932 case NE_EXPR:
10933 code = NE;
10f307d9 10934 break;
b54842d8 10935 case LT_EXPR:
10936 if (integer_onep (arg1))
10937 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10938 else
10939 code = unsignedp ? LTU : LT;
649d8da6 10940 break;
b54842d8 10941 case LE_EXPR:
10942 if (! unsignedp && integer_all_onesp (arg1))
10943 arg1 = integer_zero_node, code = LT;
10944 else
10945 code = unsignedp ? LEU : LE;
649d8da6 10946 break;
b54842d8 10947 case GT_EXPR:
10948 if (! unsignedp && integer_all_onesp (arg1))
10949 arg1 = integer_zero_node, code = GE;
10950 else
10951 code = unsignedp ? GTU : GT;
10952 break;
10953 case GE_EXPR:
10954 if (integer_onep (arg1))
10955 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10956 else
10957 code = unsignedp ? GEU : GE;
649d8da6 10958 break;
a4110d9a 10959
10960 case UNORDERED_EXPR:
10961 code = UNORDERED;
10962 break;
10963 case ORDERED_EXPR:
10964 code = ORDERED;
10965 break;
10966 case UNLT_EXPR:
10967 code = UNLT;
10968 break;
10969 case UNLE_EXPR:
10970 code = UNLE;
10971 break;
10972 case UNGT_EXPR:
10973 code = UNGT;
10974 break;
10975 case UNGE_EXPR:
10976 code = UNGE;
10977 break;
10978 case UNEQ_EXPR:
10979 code = UNEQ;
10980 break;
318a728f 10981 case LTGT_EXPR:
10982 code = LTGT;
10983 break;
a4110d9a 10984
649d8da6 10985 default:
611234b4 10986 gcc_unreachable ();
10f307d9 10987 }
10f307d9 10988
b54842d8 10989 /* Put a constant second. */
68a556d6 10990 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10991 || TREE_CODE (arg0) == FIXED_CST)
b54842d8 10992 {
a4f59596 10993 std::swap (arg0, arg1);
b54842d8 10994 code = swap_condition (code);
649d8da6 10995 }
10f307d9 10996
b54842d8 10997 /* If this is an equality or inequality test of a single bit, we can
10998 do this by shifting the bit being tested to the low-order bit and
10999 masking the result with the constant 1. If the condition was EQ,
11000 we xor it with 1. This does not require an scc insn and is faster
6881f973 11001 than an scc insn even if we have it.
11002
11003 The code to make this transformation was moved into fold_single_bit_test,
11004 so we just call into the folder and expand its result. */
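  /* For instance, (x & 8) != 0 is folded into (x >> 3) & 1, which
     needs only a shift and a mask; the EQ form additionally XORs the
     result with 1.  */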
3218a49d 11005
b54842d8 11006 if ((code == NE || code == EQ)
a53286c0 11007 && integer_zerop (arg1)
d430af93 11008 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
76c3b8b7 11009 {
a53286c0 11010 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11011 if (srcstmt
11012 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11013 {
11014 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11015 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11016 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11017 gimple_assign_rhs1 (srcstmt),
11018 gimple_assign_rhs2 (srcstmt));
11019 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11020 if (temp)
11021 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11022 }
76c3b8b7 11023 }
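
/* Standalone illustration (not part of expr.c): the rewrite performed by
   fold_single_bit_test for the path above amounts to the identities
   below; the EQ case is the NE result xor'd with 1, as the comment
   above describes.  */
#include <assert.h>
int
main (void)
{
  for (unsigned x = 0; x < 64; x++)
    for (unsigned n = 0; n < 6; n++)
      {
	unsigned mask = 1u << n;                             /* power-of-2 rhs */
	assert (((x & mask) != 0) == ((x >> n) & 1));        /* NE case */
	assert (((x & mask) == 0) == (((x >> n) & 1) ^ 1));  /* EQ case */
      }
  return 0;
}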
10f307d9 11024
d8e5b213 11025 if (! get_subtarget (target)
a54ebf2e 11026 || GET_MODE (subtarget) != operand_mode)
b54842d8 11027 subtarget = 0;
11028
b9c74b4d 11029 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
b54842d8 11030
11031 if (target == 0)
11032 target = gen_reg_rtx (mode);
11033
80e1bfa1 11034 /* Try a cstore if possible. */
11035 return emit_store_flag_force (target, code, op0, op1,
d430af93 11036 operand_mode, unsignedp,
11037 (TYPE_PRECISION (ops->type) == 1
11038 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
649d8da6 11039}
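
/* Standalone illustration (not part of expr.c) of why the final
   emit_store_flag_force argument is -1 for a 1-bit signed type: such a
   type can represent only 0 and -1, so "true" cannot be stored as 1.
   The assignment below is implementation-defined in ISO C; GCC wraps
   it to -1.  */
#include <assert.h>
struct s { signed int b : 1; };
int
main (void)
{
  struct s v;
  v.b = 1;            /* 1 does not fit in the 1-bit field; GCC stores -1 */
  assert (v.b == -1);
  return 0;
}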
b54842d8 11040\f
539a3a92 11041/* Attempt to generate a casesi instruction. Returns 1 if successful,
584abc98 11042 0 otherwise (i.e. if there is no casesi instruction).
11043
11044 DEFAULT_PROBABILITY is the probability of jumping to the default
11045 label. */
539a3a92 11046int
35cb5232 11047try_casesi (tree index_type, tree index_expr, tree minval, tree range,
584abc98 11048 rtx table_label, rtx default_label, rtx fallback_label,
11049 int default_probability)
539a3a92 11050{
8786db1e 11051 struct expand_operand ops[5];
3754d046 11052 machine_mode index_mode = SImode;
539a3a92 11053 rtx op1, op2, index;
539a3a92 11054
9a1bd12f 11055 if (! targetm.have_casesi ())
539a3a92 11056 return 0;
11057
11058 /* Convert the index to SImode. */
11059 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11060 {
3754d046 11061 machine_mode omode = TYPE_MODE (index_type);
8ec3c5c2 11062 rtx rangertx = expand_normal (range);
539a3a92 11063
11064 /* We must handle the endpoints in the original mode. */
b55f9493 11065 index_expr = build2 (MINUS_EXPR, index_type,
11066 index_expr, minval);
539a3a92 11067 minval = integer_zero_node;
8ec3c5c2 11068 index = expand_normal (index_expr);
72c30859 11069 if (default_label)
11070 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
584abc98 11071 omode, 1, default_label,
11072 default_probability);
539a3a92 11073 /* Now we can safely truncate. */
11074 index = convert_to_mode (index_mode, index, 0);
11075 }
11076 else
11077 {
11078 if (TYPE_MODE (index_type) != index_mode)
11079 {
1b3c3119 11080 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
e3b560a6 11081 index_expr = fold_convert (index_type, index_expr);
539a3a92 11082 }
11083
8ec3c5c2 11084 index = expand_normal (index_expr);
539a3a92 11085 }
0a534ba7 11086
539a3a92 11087 do_pending_stack_adjust ();
11088
8ec3c5c2 11089 op1 = expand_normal (minval);
8ec3c5c2 11090 op2 = expand_normal (range);
539a3a92 11091
8786db1e 11092 create_input_operand (&ops[0], index, index_mode);
11093 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11094 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11095 create_fixed_operand (&ops[3], table_label);
11096 create_fixed_operand (&ops[4], (default_label
11097 ? default_label
11098 : fallback_label));
9a1bd12f 11099 expand_jump_insn (targetm.code_for_casesi, 5, ops);
539a3a92 11100 return 1;
11101}
11102
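/* Standalone illustration (not part of expr.c) of the wide-index path in
   try_casesi above, assuming 64-bit long and 32-bit int: the bias and the
   bounds check must happen in the original mode, because truncating first
   could alias an out-of-range index onto a valid case label.  */
#include <stdio.h>
int
main (void)
{
  long index = 0x100000002L;   /* low 32 bits alone would select case 2 */
  long minval = 0, range = 10;
  unsigned long biased = (unsigned long) (index - minval);
  if (biased > (unsigned long) range)
    puts ("default");          /* caught before the truncation */
  else
    printf ("case %d\n", (int) biased);
  return 0;
}
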
 11103/* Attempt to generate a tablejump instruction; same concept as try_casesi. */
539a3a92 11104/* Subroutine of try_tablejump, defined below.
11105
11106 INDEX is the value being switched on, with the lowest value
b54842d8 11107 in the table already subtracted.
11108 MODE is its expected mode (needed if INDEX is constant).
11109 RANGE is the length of the jump table.
11110 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
1ccc1a7e 11111
b54842d8 11112 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
584abc98 11113 index value is out of range.
11114 DEFAULT_PROBABILITY is the probability of jumping to
11115 the default label. */
a92771b8 11116
539a3a92 11117static void
3754d046 11118do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
584abc98 11119 rtx default_label, int default_probability)
649d8da6 11120{
19cb6b50 11121 rtx temp, vector;
1ccc1a7e 11122
edb7afe8 11123 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11124 cfun->cfg->max_jumptable_ents = INTVAL (range);
71a455ac 11125
b54842d8 11126 /* Do an unsigned comparison (in the proper mode) between the index
11127 expression and the value which represents the length of the range.
11128 Since we just finished subtracting the lower bound of the range
11129 from the index expression, this comparison allows us to simultaneously
11130 check that the original index expression value is both greater than
11131 or equal to the minimum value of the range and less than or equal to
11132 the maximum value of the range. */
9282409c 11133
72c30859 11134 if (default_label)
11135 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
584abc98 11136 default_label, default_probability);
11137
10f307d9 11138
b54842d8 11139 /* If index is in range, it must fit in Pmode.
11140 Convert to Pmode so we can index with it. */
11141 if (mode != Pmode)
11142 index = convert_to_mode (Pmode, index, 1);
10f307d9 11143
c7bf1374 11144 /* Don't let a MEM slip through, because then INDEX that comes
b54842d8 11145 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11146 and break_out_memory_refs will go to work on it and mess it up. */
11147#ifdef PIC_CASE_VECTOR_ADDRESS
8ad4c111 11148 if (flag_pic && !REG_P (index))
b54842d8 11149 index = copy_to_mode_reg (Pmode, index);
11150#endif
649d8da6 11151
b54842d8 11152 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11153 GET_MODE_SIZE, because this indicates how large insns are. The other
11154 uses should all be Pmode, because they are addresses. This code
11155 could fail if addresses and insns are not the same size. */
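  /* The two steps below form the dispatch address as
	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. an ordinary scaled index into the jump table.  */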
178ef0b6 11156 index = simplify_gen_binary (MULT, Pmode, index,
11157 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11158 Pmode));
11159 index = simplify_gen_binary (PLUS, Pmode, index,
11160 gen_rtx_LABEL_REF (Pmode, table_label));
11161
b54842d8 11162#ifdef PIC_CASE_VECTOR_ADDRESS
11163 if (flag_pic)
11164 index = PIC_CASE_VECTOR_ADDRESS (index);
11165 else
10f307d9 11166#endif
4d25f9eb 11167 index = memory_address (CASE_VECTOR_MODE, index);
b54842d8 11168 temp = gen_reg_rtx (CASE_VECTOR_MODE);
e265a6da 11169 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b54842d8 11170 convert_move (temp, vector, 0);
11171
9a1bd12f 11172 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
b54842d8 11173
11174 /* If we are generating PIC code or if the table is PC-relative, the
11175 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11176 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11177 emit_barrier ();
10f307d9 11178}
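
/* Standalone illustration (not part of expr.c): the single GTU comparison
   in do_tablejump above implements the classic biased range check -- once
   the lower bound has been subtracted, one unsigned test replaces the
   pair of signed bound tests.  */
#include <assert.h>
int
main (void)
{
  int lo = -2, hi = 5;
  for (int x = -10; x <= 10; x++)
    assert ((x >= lo && x <= hi)
	    == ((unsigned) (x - lo) <= (unsigned) (hi - lo)));
  return 0;
}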
b54842d8 11179
539a3a92 11180int
35cb5232 11181try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
584abc98 11182 rtx table_label, rtx default_label, int default_probability)
539a3a92 11183{
11184 rtx index;
11185
9a1bd12f 11186 if (! targetm.have_tablejump ())
539a3a92 11187 return 0;
11188
faa43f85 11189 index_expr = fold_build2 (MINUS_EXPR, index_type,
e3b560a6 11190 fold_convert (index_type, index_expr),
11191 fold_convert (index_type, minval));
8ec3c5c2 11192 index = expand_normal (index_expr);
539a3a92 11193 do_pending_stack_adjust ();
11194
11195 do_tablejump (index, TYPE_MODE (index_type),
11196 convert_modes (TYPE_MODE (index_type),
11197 TYPE_MODE (TREE_TYPE (range)),
8ec3c5c2 11198 expand_normal (range),
78a8ed03 11199 TYPE_UNSIGNED (TREE_TYPE (range))),
584abc98 11200 table_label, default_label, default_probability);
539a3a92 11201 return 1;
11202}
1f3233d1 11203
c3309fc6 11204/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11205static rtx
35cb5232 11206const_vector_from_tree (tree exp)
c3309fc6 11207{
11208 rtvec v;
fadf62f4 11209 unsigned i;
11210 int units;
11211 tree elt;
3754d046 11212 machine_mode inner, mode;
c3309fc6 11213
11214 mode = TYPE_MODE (TREE_TYPE (exp));
11215
4ee9c684 11216 if (initializer_zerop (exp))
c3309fc6 11217 return CONST0_RTX (mode);
11218
11219 units = GET_MODE_NUNITS (mode);
11220 inner = GET_MODE_INNER (mode);
11221
11222 v = rtvec_alloc (units);
11223
fadf62f4 11224 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
c3309fc6 11225 {
fadf62f4 11226 elt = VECTOR_CST_ELT (exp, i);
c3309fc6 11227
11228 if (TREE_CODE (elt) == REAL_CST)
11229 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11230 inner);
68a556d6 11231 else if (TREE_CODE (elt) == FIXED_CST)
11232 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11233 inner);
c3309fc6 11234 else
c4050ce7 11235 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
c3309fc6 11236 }
11237
6e68dcb2 11238 return gen_rtx_CONST_VECTOR (mode, v);
c3309fc6 11239}
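
/* For example, a V4SImode VECTOR_CST {1, 2, 3, 4} yields a CONST_VECTOR
   whose rtvec holds one CONST_INT per lane, in element order; all-zero
   constants short-circuit to the shared CONST0_RTX above.  */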
58d82cd0 11240
382597e4 11241/* Build a decl for a personality function given a language prefix. */
58d82cd0 11242
11243tree
382597e4 11244build_personality_function (const char *lang)
58d82cd0 11245{
382597e4 11246 const char *unwind_and_version;
58d82cd0 11247 tree decl, type;
382597e4 11248 char *name;
11249
218e3e4e 11250 switch (targetm_common.except_unwind_info (&global_options))
382597e4 11251 {
11252 case UI_NONE:
11253 return NULL;
11254 case UI_SJLJ:
11255 unwind_and_version = "_sj0";
11256 break;
11257 case UI_DWARF2:
11258 case UI_TARGET:
11259 unwind_and_version = "_v0";
11260 break;
8ad0b530 11261 case UI_SEH:
11262 unwind_and_version = "_seh0";
11263 break;
382597e4 11264 default:
11265 gcc_unreachable ();
11266 }
11267
11268 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
58d82cd0 11269
11270 type = build_function_type_list (integer_type_node, integer_type_node,
11271 long_long_unsigned_type_node,
11272 ptr_type_node, ptr_type_node, NULL_TREE);
11273 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11274 get_identifier (name), type);
11275 DECL_ARTIFICIAL (decl) = 1;
11276 DECL_EXTERNAL (decl) = 1;
11277 TREE_PUBLIC (decl) = 1;
11278
11279 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11280 are the flags assigned by targetm.encode_section_info. */
11281 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11282
11283 return decl;
11284}
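
/* For example, with the C++ frontend's "gxx" prefix this scheme yields
   the familiar personality symbols (one per unwind-info case above):
     UI_DWARF2 / UI_TARGET:  __gxx_personality_v0
     UI_SJLJ:                __gxx_personality_sj0
     UI_SEH:                 __gxx_personality_seh0  */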
11285
11286/* Extracts the personality function of DECL and returns the corresponding
11287 libfunc. */
11288
11289rtx
11290get_personality_function (tree decl)
11291{
11292 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11293 enum eh_personality_kind pk;
11294
11295 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11296 if (pk == eh_personality_none)
11297 return NULL;
11298
11299 if (!personality
11300 && pk == eh_personality_any)
11301 personality = lang_hooks.eh_personality ();
11302
11303 if (pk == eh_personality_lang)
11304 gcc_assert (personality != NULL_TREE);
11305
11306 return XEXP (DECL_RTL (personality), 0);
11307}
11308
d53441c8 11309/* Returns a tree for the size of EXP in bytes. */
11310
11311static tree
11312tree_expr_size (const_tree exp)
11313{
11314 if (DECL_P (exp)
11315 && DECL_SIZE_UNIT (exp) != 0)
11316 return DECL_SIZE_UNIT (exp);
11317 else
11318 return size_in_bytes (TREE_TYPE (exp));
11319}
11320
11321/* Return an rtx for the size in bytes of the value of EXP. */
11322
11323rtx
11324expr_size (tree exp)
11325{
11326 tree size;
11327
11328 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11329 size = TREE_OPERAND (exp, 1);
11330 else
11331 {
11332 size = tree_expr_size (exp);
11333 gcc_assert (size);
11334 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11335 }
11336
11337 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11338}
11339
11340/* Return a wide integer for the size in bytes of the value of EXP, or -1
11341 if the size can vary or is larger than an integer. */
11342
11343static HOST_WIDE_INT
11344int_expr_size (tree exp)
11345{
11346 tree size;
11347
11348 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11349 size = TREE_OPERAND (exp, 1);
11350 else
11351 {
11352 size = tree_expr_size (exp);
11353 gcc_assert (size);
11354 }
11355
11356 if (size == 0 || !tree_fits_shwi_p (size))
11357 return -1;
11358
11359 return tree_to_shwi (size);
11360}
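
/* Standalone illustration (not part of expr.c): int_expr_size yields a
   constant only when the size fits a compile-time HOST_WIDE_INT.  A
   declared int[10] would report 10 * sizeof (int), while a
   variable-length array has no constant size, which is the "return -1"
   case.  The same split is visible at the source level:  */
#include <stdio.h>
int
main (void)
{
  int n = 7;
  int fixed[10];
  int vla[n];
  printf ("%zu\n", sizeof fixed);  /* compile-time constant */
  printf ("%zu\n", sizeof vla);    /* computed at run time */
  return 0;
}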
11361
1f3233d1 11362#include "gt-expr.h"