gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "rtl.h"
27 #include "df.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "attribs.h"
33 #include "varasm.h"
34 #include "flags.h"
35 #include "regs.h"
36 #include "except.h"
37 #include "insn-config.h"
38 #include "insn-attr.h"
39 #include "expmed.h"
40 #include "dojump.h"
41 #include "explow.h"
42 #include "calls.h"
43 #include "emit-rtl.h"
44 #include "stmt.h"
45 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
46 #include "expr.h"
47 #include "insn-codes.h"
48 #include "optabs.h"
49 #include "libfuncs.h"
50 #include "recog.h"
51 #include "reload.h"
52 #include "typeclass.h"
53 #include "toplev.h"
54 #include "langhooks.h"
55 #include "intl.h"
56 #include "tm_p.h"
57 #include "tree-iterator.h"
58 #include "internal-fn.h"
59 #include "cgraph.h"
60 #include "target.h"
61 #include "common/common-target.h"
62 #include "timevar.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 #include "builtins.h"
71 #include "tree-chkp.h"
72 #include "rtl-chkp.h"
73 #include "ccmp.h"
74
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* This structure is used by move_by_pieces to describe the move to
85 be performed. */
86 struct move_by_pieces_d
87 {
88 rtx to;
89 rtx to_addr;
90 int autinc_to;
91 int explicit_inc_to;
92 rtx from;
93 rtx from_addr;
94 int autinc_from;
95 int explicit_inc_from;
96 unsigned HOST_WIDE_INT len;
97 HOST_WIDE_INT offset;
98 int reverse;
99 };
100
101 /* This structure is used by store_by_pieces to describe the clear to
102 be performed. */
103
104 struct store_by_pieces_d
105 {
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
110 unsigned HOST_WIDE_INT len;
111 HOST_WIDE_INT offset;
112 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
113 void *constfundata;
114 int reverse;
115 };
116
117 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
118 struct move_by_pieces_d *);
119 static bool block_move_libcall_safe_for_call_parm (void);
120 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
121 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
122 unsigned HOST_WIDE_INT);
123 static tree emit_block_move_libcall_fn (int);
124 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
125 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
126 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
127 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
128 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
129 struct store_by_pieces_d *);
130 static tree clear_storage_libcall_fn (int);
131 static rtx_insn *compress_float_constant (rtx, rtx);
132 static rtx get_subtarget (rtx);
133 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
134 HOST_WIDE_INT, machine_mode,
135 tree, int, alias_set_type);
136 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
137 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
138 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
139 machine_mode, tree, alias_set_type, bool);
140
141 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
142
143 static int is_aligning_offset (const_tree, const_tree);
144 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
145 static rtx do_store_flag (sepops, rtx, machine_mode);
146 #ifdef PUSH_ROUNDING
147 static void emit_single_push_insn (machine_mode, rtx, tree);
148 #endif
149 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
150 static rtx const_vector_from_tree (tree);
151 static tree tree_expr_size (const_tree);
152 static HOST_WIDE_INT int_expr_size (tree);
153
154 \f
155 /* This is run to set up which modes can be used
156 directly in memory and to initialize the block move optab. It is run
157 at the beginning of compilation and when the target is reinitialized. */
158
159 void
160 init_expr_target (void)
161 {
162 rtx insn, pat;
163 machine_mode mode;
164 int num_clobbers;
165 rtx mem, mem1;
166 rtx reg;
167
168 /* Try indexing by frame ptr and try by stack ptr.
169 It is known that on the Convex the stack ptr isn't a valid index.
170 With luck, one or the other is valid on any machine. */
171 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
172 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
173
174 /* A scratch register we can modify in-place below to avoid
175 useless RTL allocations. */
176 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
177
178 insn = rtx_alloc (INSN);
179 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
180 PATTERN (insn) = pat;
181
182 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
183 mode = (machine_mode) ((int) mode + 1))
184 {
185 int regno;
186
187 direct_load[(int) mode] = direct_store[(int) mode] = 0;
188 PUT_MODE (mem, mode);
189 PUT_MODE (mem1, mode);
190
191 /* See if there is some register that can be used in this mode and
192 directly loaded or stored from memory. */
193
194 if (mode != VOIDmode && mode != BLKmode)
195 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
196 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
197 regno++)
198 {
199 if (! HARD_REGNO_MODE_OK (regno, mode))
200 continue;
201
202 set_mode_and_regno (reg, mode, regno);
203
204 SET_SRC (pat) = mem;
205 SET_DEST (pat) = reg;
206 if (recog (pat, insn, &num_clobbers) >= 0)
207 direct_load[(int) mode] = 1;
208
209 SET_SRC (pat) = mem1;
210 SET_DEST (pat) = reg;
211 if (recog (pat, insn, &num_clobbers) >= 0)
212 direct_load[(int) mode] = 1;
213
214 SET_SRC (pat) = reg;
215 SET_DEST (pat) = mem;
216 if (recog (pat, insn, &num_clobbers) >= 0)
217 direct_store[(int) mode] = 1;
218
219 SET_SRC (pat) = reg;
220 SET_DEST (pat) = mem1;
221 if (recog (pat, insn, &num_clobbers) >= 0)
222 direct_store[(int) mode] = 1;
223 }
224 }
225
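/* Record, for every pair of scalar float modes, whether a FLOAT_EXTEND
   from the narrower mode can take its operand directly from memory;
   the results are cached in float_extend_from_mem.  */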
226 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
227
228 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
229 mode = GET_MODE_WIDER_MODE (mode))
230 {
231 machine_mode srcmode;
232 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
233 srcmode = GET_MODE_WIDER_MODE (srcmode))
234 {
235 enum insn_code ic;
236
237 ic = can_extend_p (mode, srcmode, 0);
238 if (ic == CODE_FOR_nothing)
239 continue;
240
241 PUT_MODE (mem, srcmode);
242
243 if (insn_operand_matches (ic, 1, mem))
244 float_extend_from_mem[mode][srcmode] = true;
245 }
246 }
247 }
248
249 /* This is run at the start of compiling a function. */
250
251 void
252 init_expr (void)
253 {
254 memset (&crtl->expr, 0, sizeof (crtl->expr));
255 }
256 \f
257 /* Copy data from FROM to TO, where the machine modes are not the same.
258 Both modes may be integer, or both may be floating, or both may be
259 fixed-point.
260 UNSIGNEDP should be nonzero if FROM is an unsigned type.
261 This causes zero-extension instead of sign-extension. */
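/* For instance, widening a QImode value into an SImode register with
   UNSIGNEDP nonzero is emitted as a ZERO_EXTEND when the target has such
   a pattern, and otherwise as an extension through an intermediate mode
   or a pair of shifts (see the extension handling below).  */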
262
263 void
264 convert_move (rtx to, rtx from, int unsignedp)
265 {
266 machine_mode to_mode = GET_MODE (to);
267 machine_mode from_mode = GET_MODE (from);
268 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
269 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
270 enum insn_code code;
271 rtx libcall;
272
273 /* rtx code for making an equivalent value. */
274 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
275 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
276
277
278 gcc_assert (to_real == from_real);
279 gcc_assert (to_mode != BLKmode);
280 gcc_assert (from_mode != BLKmode);
281
282 /* If the source and destination are already the same, then there's
283 nothing to do. */
284 if (to == from)
285 return;
286
287 /* If FROM is a SUBREG that indicates that we have already done at least
288 the required extension, strip it. We don't handle such SUBREGs as
289 TO here. */
290
291 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
292 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
293 >= GET_MODE_PRECISION (to_mode))
294 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
295 from = gen_lowpart (to_mode, from), from_mode = to_mode;
296
297 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
298
299 if (to_mode == from_mode
300 || (from_mode == VOIDmode && CONSTANT_P (from)))
301 {
302 emit_move_insn (to, from);
303 return;
304 }
305
306 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
307 {
308 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
309
310 if (VECTOR_MODE_P (to_mode))
311 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
312 else
313 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
314
315 emit_move_insn (to, from);
316 return;
317 }
318
319 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
320 {
321 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
322 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
323 return;
324 }
325
326 if (to_real)
327 {
328 rtx value;
329 rtx_insn *insns;
330 convert_optab tab;
331
332 gcc_assert ((GET_MODE_PRECISION (from_mode)
333 != GET_MODE_PRECISION (to_mode))
334 || (DECIMAL_FLOAT_MODE_P (from_mode)
335 != DECIMAL_FLOAT_MODE_P (to_mode)));
336
337 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
338 /* Conversion between decimal float and binary float, same size. */
339 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
340 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
341 tab = sext_optab;
342 else
343 tab = trunc_optab;
344
345 /* Try converting directly if the insn is supported. */
346
347 code = convert_optab_handler (tab, to_mode, from_mode);
348 if (code != CODE_FOR_nothing)
349 {
350 emit_unop_insn (code, to, from,
351 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
352 return;
353 }
354
355 /* Otherwise use a libcall. */
356 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
357
358 /* Is this conversion implemented yet? */
359 gcc_assert (libcall);
360
361 start_sequence ();
362 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
363 1, from, from_mode);
364 insns = get_insns ();
365 end_sequence ();
366 emit_libcall_block (insns, to, value,
367 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
368 from)
369 : gen_rtx_FLOAT_EXTEND (to_mode, from));
370 return;
371 }
372
373 /* Handle pointer conversion. */ /* SPEE 900220. */
374 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
375 {
376 convert_optab ctab;
377
378 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
379 ctab = trunc_optab;
380 else if (unsignedp)
381 ctab = zext_optab;
382 else
383 ctab = sext_optab;
384
385 if (convert_optab_handler (ctab, to_mode, from_mode)
386 != CODE_FOR_nothing)
387 {
388 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
389 to, from, UNKNOWN);
390 return;
391 }
392 }
393
394 /* Targets are expected to provide conversion insns between PxImode and
395 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
396 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
397 {
398 machine_mode full_mode
399 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
400
401 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
402 != CODE_FOR_nothing);
403
404 if (full_mode != from_mode)
405 from = convert_to_mode (full_mode, from, unsignedp);
406 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
407 to, from, UNKNOWN);
408 return;
409 }
410 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
411 {
412 rtx new_from;
413 machine_mode full_mode
414 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
415 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
416 enum insn_code icode;
417
418 icode = convert_optab_handler (ctab, full_mode, from_mode);
419 gcc_assert (icode != CODE_FOR_nothing);
420
421 if (to_mode == full_mode)
422 {
423 emit_unop_insn (icode, to, from, UNKNOWN);
424 return;
425 }
426
427 new_from = gen_reg_rtx (full_mode);
428 emit_unop_insn (icode, new_from, from, UNKNOWN);
429
430 /* else proceed to integer conversions below. */
431 from_mode = full_mode;
432 from = new_from;
433 }
434
435 /* Make sure both are fixed-point modes or both are not. */
436 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
437 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
438 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
439 {
440 /* If we widen from_mode to to_mode and they are in the same class,
441 we won't saturate the result.
442 Otherwise, always saturate the result to play safe. */
443 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
444 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
445 expand_fixed_convert (to, from, 0, 0);
446 else
447 expand_fixed_convert (to, from, 0, 1);
448 return;
449 }
450
451 /* Now both modes are integers. */
452
453 /* Handle expanding beyond a word. */
454 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
455 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
456 {
457 rtx_insn *insns;
458 rtx lowpart;
459 rtx fill_value;
460 rtx lowfrom;
461 int i;
462 machine_mode lowpart_mode;
463 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
464
465 /* Try converting directly if the insn is supported. */
466 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
467 != CODE_FOR_nothing)
468 {
469 /* If FROM is a SUBREG, put it into a register. Do this
470 so that we always generate the same set of insns for
471 better cse'ing; if an intermediate assignment occurred,
472 we won't be doing the operation directly on the SUBREG. */
473 if (optimize > 0 && GET_CODE (from) == SUBREG)
474 from = force_reg (from_mode, from);
475 emit_unop_insn (code, to, from, equiv_code);
476 return;
477 }
478 /* Next, try converting via full word. */
479 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
480 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
481 != CODE_FOR_nothing))
482 {
483 rtx word_to = gen_reg_rtx (word_mode);
484 if (REG_P (to))
485 {
486 if (reg_overlap_mentioned_p (to, from))
487 from = force_reg (from_mode, from);
488 emit_clobber (to);
489 }
490 convert_move (word_to, from, unsignedp);
491 emit_unop_insn (code, to, word_to, equiv_code);
492 return;
493 }
494
495 /* No special multiword conversion insn; do it by hand. */
496 start_sequence ();
497
498 /* Since we will turn this into a no conflict block, we must ensure
499 that the source does not overlap the target, so force it into an
500 isolated register when it might. Likewise for any MEM input, since the
501 conversion sequence might require several references to it and we
502 must ensure we're getting the same value every time. */
503
504 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
505 from = force_reg (from_mode, from);
506
507 /* Get a copy of FROM widened to a word, if necessary. */
508 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
509 lowpart_mode = word_mode;
510 else
511 lowpart_mode = from_mode;
512
513 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
514
515 lowpart = gen_lowpart (lowpart_mode, to);
516 emit_move_insn (lowpart, lowfrom);
517
518 /* Compute the value to put in each remaining word. */
519 if (unsignedp)
520 fill_value = const0_rtx;
521 else
522 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
523 LT, lowfrom, const0_rtx,
524 lowpart_mode, 0, -1);
525
526 /* Fill the remaining words. */
527 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
528 {
529 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
530 rtx subword = operand_subword (to, index, 1, to_mode);
531
532 gcc_assert (subword);
533
534 if (fill_value != subword)
535 emit_move_insn (subword, fill_value);
536 }
537
538 insns = get_insns ();
539 end_sequence ();
540
541 emit_insn (insns);
542 return;
543 }
544
545 /* Truncating multi-word to a word or less. */
546 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
547 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
548 {
549 if (!((MEM_P (from)
550 && ! MEM_VOLATILE_P (from)
551 && direct_load[(int) to_mode]
552 && ! mode_dependent_address_p (XEXP (from, 0),
553 MEM_ADDR_SPACE (from)))
554 || REG_P (from)
555 || GET_CODE (from) == SUBREG))
556 from = force_reg (from_mode, from);
557 convert_move (to, gen_lowpart (word_mode, from), 0);
558 return;
559 }
560
561 /* Now follow all the conversions between integers
562 no more than a word long. */
563
564 /* For truncation, usually we can just refer to FROM in a narrower mode. */
565 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
566 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
567 {
568 if (!((MEM_P (from)
569 && ! MEM_VOLATILE_P (from)
570 && direct_load[(int) to_mode]
571 && ! mode_dependent_address_p (XEXP (from, 0),
572 MEM_ADDR_SPACE (from)))
573 || REG_P (from)
574 || GET_CODE (from) == SUBREG))
575 from = force_reg (from_mode, from);
576 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
577 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
578 from = copy_to_reg (from);
579 emit_move_insn (to, gen_lowpart (to_mode, from));
580 return;
581 }
582
583 /* Handle extension. */
584 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
585 {
586 /* Convert directly if that works. */
587 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
588 != CODE_FOR_nothing)
589 {
590 emit_unop_insn (code, to, from, equiv_code);
591 return;
592 }
593 else
594 {
595 machine_mode intermediate;
596 rtx tmp;
597 int shift_amount;
598
599 /* Search for a mode to convert via. */
600 for (intermediate = from_mode; intermediate != VOIDmode;
601 intermediate = GET_MODE_WIDER_MODE (intermediate))
602 if (((can_extend_p (to_mode, intermediate, unsignedp)
603 != CODE_FOR_nothing)
604 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
605 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
606 && (can_extend_p (intermediate, from_mode, unsignedp)
607 != CODE_FOR_nothing))
608 {
609 convert_move (to, convert_to_mode (intermediate, from,
610 unsignedp), unsignedp);
611 return;
612 }
613
614 /* No suitable intermediate mode.
615 Generate what we need with shifts. */
616 shift_amount = (GET_MODE_PRECISION (to_mode)
617 - GET_MODE_PRECISION (from_mode));
618 from = gen_lowpart (to_mode, force_reg (from_mode, from));
619 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
620 to, unsignedp);
621 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
622 to, unsignedp);
623 if (tmp != to)
624 emit_move_insn (to, tmp);
625 return;
626 }
627 }
628
629 /* Support special truncate insns for certain modes. */
630 if (convert_optab_handler (trunc_optab, to_mode,
631 from_mode) != CODE_FOR_nothing)
632 {
633 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
634 to, from, UNKNOWN);
635 return;
636 }
637
638 /* Handle truncation of volatile memrefs, and so on;
639 the things that couldn't be truncated directly,
640 and for which there was no special instruction.
641
642 ??? Code above formerly short-circuited this, for most integer
643 mode pairs, with a force_reg in from_mode followed by a recursive
644 call to this routine. Appears always to have been wrong. */
645 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
646 {
647 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
648 emit_move_insn (to, temp);
649 return;
650 }
651
652 /* Mode combination is not recognized. */
653 gcc_unreachable ();
654 }
655
656 /* Return an rtx for a value that would result
657 from converting X to mode MODE.
658 Both X and MODE may be floating, or both integer.
659 UNSIGNEDP is nonzero if X is an unsigned value.
660 This can be done by referring to a part of X in place
661 or by copying to a new temporary with conversion. */
662
663 rtx
664 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
665 {
666 return convert_modes (mode, VOIDmode, x, unsignedp);
667 }
668
669 /* Return an rtx for a value that would result
670 from converting X from mode OLDMODE to mode MODE.
671 Both modes may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
673
674 This can be done by referring to a part of X in place
675 or by copying to a new temporary with conversion.
676
677 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
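/* Note that when MODE is a scalar integer mode and X is a constant
   integer, the conversion is done at compile time via the wide_int path
   below and no instructions are emitted.  */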
678
679 rtx
680 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
681 {
682 rtx temp;
683
684 /* If FROM is a SUBREG that indicates that we have already done at least
685 the required extension, strip it. */
686
687 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
688 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
689 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
690 x = gen_lowpart (mode, SUBREG_REG (x));
691
692 if (GET_MODE (x) != VOIDmode)
693 oldmode = GET_MODE (x);
694
695 if (mode == oldmode)
696 return x;
697
698 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
699 {
700 /* If the caller did not tell us the old mode, then there is not
701 much to do with respect to canonicalization. We have to
702 assume that all the bits are significant. */
703 if (GET_MODE_CLASS (oldmode) != MODE_INT)
704 oldmode = MAX_MODE_INT;
705 wide_int w = wide_int::from (std::make_pair (x, oldmode),
706 GET_MODE_PRECISION (mode),
707 unsignedp ? UNSIGNED : SIGNED);
708 return immed_wide_int_const (w, mode);
709 }
710
711 /* We can do this with a gen_lowpart if both desired and current modes
712 are integer, and this is either a constant integer, a register, or a
713 non-volatile MEM. */
714 if (GET_MODE_CLASS (mode) == MODE_INT
715 && GET_MODE_CLASS (oldmode) == MODE_INT
716 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
717 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
718 || (REG_P (x)
719 && (!HARD_REGISTER_P (x)
720 || HARD_REGNO_MODE_OK (REGNO (x), mode))
721 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
722
723 return gen_lowpart (mode, x);
724
725 /* Converting from an integer constant into MODE is always equivalent
726 to a subreg operation. */
727 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
728 {
729 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
730 return simplify_gen_subreg (mode, x, oldmode, 0);
731 }
732
733 temp = gen_reg_rtx (mode);
734 convert_move (temp, x, unsignedp);
735 return temp;
736 }
737 \f
738 /* Return the largest alignment we can use for doing a move (or store)
739 of MAX_PIECES. ALIGN is the largest alignment we could use. */
740
741 static unsigned int
742 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
743 {
744 machine_mode tmode;
745
746 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
747 if (align >= GET_MODE_ALIGNMENT (tmode))
748 align = GET_MODE_ALIGNMENT (tmode);
749 else
750 {
751 machine_mode tmode, xmode;
752
753 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
754 tmode != VOIDmode;
755 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
756 if (GET_MODE_SIZE (tmode) > max_pieces
757 || SLOW_UNALIGNED_ACCESS (tmode, align))
758 break;
759
760 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
761 }
762
763 return align;
764 }
765
766 /* Return the widest integer mode whose size is strictly less than SIZE
767 bytes. If no such mode can be found, return VOIDmode. */
768
769 static machine_mode
770 widest_int_mode_for_size (unsigned int size)
771 {
772 machine_mode tmode, mode = VOIDmode;
773
774 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
775 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
776 if (GET_MODE_SIZE (tmode) < size)
777 mode = tmode;
778
779 return mode;
780 }
781
782 /* Determine whether the LEN bytes can be moved by using several move
783 instructions. Return nonzero if a call to move_by_pieces should
784 succeed. */
785
786 int
787 can_move_by_pieces (unsigned HOST_WIDE_INT len,
788 unsigned int align)
789 {
790 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
791 optimize_insn_for_speed_p ());
792 }
793
794 /* Generate several move instructions to copy LEN bytes from block FROM to
795 block TO. (These are MEM rtx's with BLKmode).
796
797 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
798 used to push FROM to the stack.
799
800 ALIGN is maximum stack alignment we can assume.
801
802 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
803 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
804 stpcpy. */
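/* E.g. on a 64-bit target with MOVE_MAX_PIECES == 8 and sufficiently
   aligned operands, a 16-byte copy is emitted as two DImode moves rather
   than sixteen QImode ones.  */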
805
806 rtx
807 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
808 unsigned int align, int endp)
809 {
810 struct move_by_pieces_d data;
811 machine_mode to_addr_mode;
812 machine_mode from_addr_mode = get_address_mode (from);
813 rtx to_addr, from_addr = XEXP (from, 0);
814 unsigned int max_size = MOVE_MAX_PIECES + 1;
815 enum insn_code icode;
816
817 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
818
819 data.offset = 0;
820 data.from_addr = from_addr;
821 if (to)
822 {
823 to_addr_mode = get_address_mode (to);
824 to_addr = XEXP (to, 0);
825 data.to = to;
826 data.autinc_to
827 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
828 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
829 data.reverse
830 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
831 }
832 else
833 {
834 to_addr_mode = VOIDmode;
835 to_addr = NULL_RTX;
836 data.to = NULL_RTX;
837 data.autinc_to = 1;
838 if (STACK_GROWS_DOWNWARD)
839 data.reverse = 1;
840 else
841 data.reverse = 0;
842 }
843 data.to_addr = to_addr;
844 data.from = from;
845 data.autinc_from
846 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
847 || GET_CODE (from_addr) == POST_INC
848 || GET_CODE (from_addr) == POST_DEC);
849
850 data.explicit_inc_from = 0;
851 data.explicit_inc_to = 0;
852 if (data.reverse) data.offset = len;
853 data.len = len;
854
855 /* If copying requires more than two move insns,
856 copy addresses to registers (to make displacements shorter)
857 and use post-increment if available. */
858 if (!(data.autinc_from && data.autinc_to)
859 && move_by_pieces_ninsns (len, align, max_size) > 2)
860 {
861 /* Find the mode of the largest move...
862 MODE might not be used depending on the definitions of the
863 USE_* macros below. */
864 machine_mode mode ATTRIBUTE_UNUSED
865 = widest_int_mode_for_size (max_size);
866
867 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
868 {
869 data.from_addr = copy_to_mode_reg (from_addr_mode,
870 plus_constant (from_addr_mode,
871 from_addr, len));
872 data.autinc_from = 1;
873 data.explicit_inc_from = -1;
874 }
875 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
876 {
877 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
878 data.autinc_from = 1;
879 data.explicit_inc_from = 1;
880 }
881 if (!data.autinc_from && CONSTANT_P (from_addr))
882 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
883 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
884 {
885 data.to_addr = copy_to_mode_reg (to_addr_mode,
886 plus_constant (to_addr_mode,
887 to_addr, len));
888 data.autinc_to = 1;
889 data.explicit_inc_to = -1;
890 }
891 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
892 {
893 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
894 data.autinc_to = 1;
895 data.explicit_inc_to = 1;
896 }
897 if (!data.autinc_to && CONSTANT_P (to_addr))
898 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
899 }
900
901 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
902
903 /* First move what we can in the largest integer mode, then go to
904 successively smaller modes. */
905
906 while (max_size > 1 && data.len > 0)
907 {
908 machine_mode mode = widest_int_mode_for_size (max_size);
909
910 if (mode == VOIDmode)
911 break;
912
913 icode = optab_handler (mov_optab, mode);
914 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
915 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
916
917 max_size = GET_MODE_SIZE (mode);
918 }
919
920 /* The code above should have handled everything. */
921 gcc_assert (!data.len);
922
923 if (endp)
924 {
925 rtx to1;
926
927 gcc_assert (!data.reverse);
928 if (data.autinc_to)
929 {
930 if (endp == 2)
931 {
932 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
933 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
934 else
935 data.to_addr = copy_to_mode_reg (to_addr_mode,
936 plus_constant (to_addr_mode,
937 data.to_addr,
938 -1));
939 }
940 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
941 data.offset);
942 }
943 else
944 {
945 if (endp == 2)
946 --data.offset;
947 to1 = adjust_address (data.to, QImode, data.offset);
948 }
949 return to1;
950 }
951 else
952 return data.to;
953 }
954
955 /* Return number of insns required to move L bytes by pieces.
956 ALIGN (in bits) is maximum alignment we can assume. */
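/* This mirrors the mode selection loop in move_by_pieces, so both
   functions agree on how many move insns a given copy needs.  */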
957
958 unsigned HOST_WIDE_INT
959 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
960 unsigned int max_size)
961 {
962 unsigned HOST_WIDE_INT n_insns = 0;
963
964 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
965
966 while (max_size > 1 && l > 0)
967 {
968 machine_mode mode;
969 enum insn_code icode;
970
971 mode = widest_int_mode_for_size (max_size);
972
973 if (mode == VOIDmode)
974 break;
975
976 icode = optab_handler (mov_optab, mode);
977 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
978 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
979
980 max_size = GET_MODE_SIZE (mode);
981 }
982
983 gcc_assert (!l);
984 return n_insns;
985 }
986
987 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
988 with move instructions for mode MODE. GENFUN is the gen_... function
989 to make a move insn for that mode. DATA has all the other info. */
990
991 static void
992 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
993 struct move_by_pieces_d *data)
994 {
995 unsigned int size = GET_MODE_SIZE (mode);
996 rtx to1 = NULL_RTX, from1;
997
998 while (data->len >= size)
999 {
1000 if (data->reverse)
1001 data->offset -= size;
1002
1003 if (data->to)
1004 {
1005 if (data->autinc_to)
1006 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1007 data->offset);
1008 else
1009 to1 = adjust_address (data->to, mode, data->offset);
1010 }
1011
1012 if (data->autinc_from)
1013 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1014 data->offset);
1015 else
1016 from1 = adjust_address (data->from, mode, data->offset);
1017
1018 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1019 emit_insn (gen_add2_insn (data->to_addr,
1020 gen_int_mode (-(HOST_WIDE_INT) size,
1021 GET_MODE (data->to_addr))));
1022 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1023 emit_insn (gen_add2_insn (data->from_addr,
1024 gen_int_mode (-(HOST_WIDE_INT) size,
1025 GET_MODE (data->from_addr))));
1026
1027 if (data->to)
1028 emit_insn ((*genfun) (to1, from1));
1029 else
1030 {
1031 #ifdef PUSH_ROUNDING
1032 emit_single_push_insn (mode, from1, NULL);
1033 #else
1034 gcc_unreachable ();
1035 #endif
1036 }
1037
1038 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1039 emit_insn (gen_add2_insn (data->to_addr,
1040 gen_int_mode (size,
1041 GET_MODE (data->to_addr))));
1042 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1043 emit_insn (gen_add2_insn (data->from_addr,
1044 gen_int_mode (size,
1045 GET_MODE (data->from_addr))));
1046
1047 if (! data->reverse)
1048 data->offset += size;
1049
1050 data->len -= size;
1051 }
1052 }
1053 \f
1054 /* Emit code to move a block Y to a block X. This may be done with
1055 string-move instructions, with multiple scalar move instructions,
1056 or with a library call.
1057
1058 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1059 SIZE is an rtx that says how long they are.
1060 ALIGN is the maximum alignment we can assume they have.
1061 METHOD describes what kind of copy this is, and what mechanisms may be used.
1062 MIN_SIZE is the minimal size of block to move
1063 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1064 in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1065
1066 Return the address of the new block, if memcpy is called and returns it,
1067 0 otherwise. */
1068
1069 rtx
1070 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1071 unsigned int expected_align, HOST_WIDE_INT expected_size,
1072 unsigned HOST_WIDE_INT min_size,
1073 unsigned HOST_WIDE_INT max_size,
1074 unsigned HOST_WIDE_INT probable_max_size)
1075 {
1076 bool may_use_call;
1077 rtx retval = 0;
1078 unsigned int align;
1079
1080 gcc_assert (size);
1081 if (CONST_INT_P (size)
1082 && INTVAL (size) == 0)
1083 return 0;
1084
1085 switch (method)
1086 {
1087 case BLOCK_OP_NORMAL:
1088 case BLOCK_OP_TAILCALL:
1089 may_use_call = true;
1090 break;
1091
1092 case BLOCK_OP_CALL_PARM:
1093 may_use_call = block_move_libcall_safe_for_call_parm ();
1094
1095 /* Make inhibit_defer_pop nonzero around the library call
1096 to force it to pop the arguments right away. */
1097 NO_DEFER_POP;
1098 break;
1099
1100 case BLOCK_OP_NO_LIBCALL:
1101 may_use_call = false;
1102 break;
1103
1104 default:
1105 gcc_unreachable ();
1106 }
1107
1108 gcc_assert (MEM_P (x) && MEM_P (y));
1109 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1110 gcc_assert (align >= BITS_PER_UNIT);
1111
1112 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1113 block copy is more efficient for other large modes, e.g. DCmode. */
1114 x = adjust_address (x, BLKmode, 0);
1115 y = adjust_address (y, BLKmode, 0);
1116
1117 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1118 can be incorrect is coming from __builtin_memcpy. */
1119 if (CONST_INT_P (size))
1120 {
1121 x = shallow_copy_rtx (x);
1122 y = shallow_copy_rtx (y);
1123 set_mem_size (x, INTVAL (size));
1124 set_mem_size (y, INTVAL (size));
1125 }
1126
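/* Strategy: try an inline by-pieces copy first, then a movmem pattern,
   then the memcpy libcall (when allowed and both addresses are in the
   generic address space), and finally fall back to a byte-copy loop.  */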
1127 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1128 move_by_pieces (x, y, INTVAL (size), align, 0);
1129 else if (emit_block_move_via_movmem (x, y, size, align,
1130 expected_align, expected_size,
1131 min_size, max_size, probable_max_size))
1132 ;
1133 else if (may_use_call
1134 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1135 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1136 {
1137 /* Since x and y are passed to a libcall, mark the corresponding
1138 tree EXPR as addressable. */
1139 tree y_expr = MEM_EXPR (y);
1140 tree x_expr = MEM_EXPR (x);
1141 if (y_expr)
1142 mark_addressable (y_expr);
1143 if (x_expr)
1144 mark_addressable (x_expr);
1145 retval = emit_block_move_via_libcall (x, y, size,
1146 method == BLOCK_OP_TAILCALL);
1147 }
1148
1149 else
1150 emit_block_move_via_loop (x, y, size, align);
1151
1152 if (method == BLOCK_OP_CALL_PARM)
1153 OK_DEFER_POP;
1154
1155 return retval;
1156 }
1157
1158 rtx
1159 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1160 {
1161 unsigned HOST_WIDE_INT max, min = 0;
1162 if (GET_CODE (size) == CONST_INT)
1163 min = max = UINTVAL (size);
1164 else
1165 max = GET_MODE_MASK (GET_MODE (size));
1166 return emit_block_move_hints (x, y, size, method, 0, -1,
1167 min, max, max);
1168 }
1169
1170 /* A subroutine of emit_block_move. Returns true if calling the
1171 block move libcall will not clobber any parameters which may have
1172 already been placed on the stack. */
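/* Roughly: the call is safe if arguments are pushed, or if every
   argument to memcpy is passed entirely in registers that have no
   stack space reserved for them.  */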
1173
1174 static bool
1175 block_move_libcall_safe_for_call_parm (void)
1176 {
1177 #if defined (REG_PARM_STACK_SPACE)
1178 tree fn;
1179 #endif
1180
1181 /* If arguments are pushed on the stack, then they're safe. */
1182 if (PUSH_ARGS)
1183 return true;
1184
1185 /* If registers go on the stack anyway, any argument is sure to clobber
1186 an outgoing argument. */
1187 #if defined (REG_PARM_STACK_SPACE)
1188 fn = emit_block_move_libcall_fn (false);
1189 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1190 depend on its argument. */
1191 (void) fn;
1192 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1193 && REG_PARM_STACK_SPACE (fn) != 0)
1194 return false;
1195 #endif
1196
1197 /* If any argument goes in memory, then it might clobber an outgoing
1198 argument. */
1199 {
1200 CUMULATIVE_ARGS args_so_far_v;
1201 cumulative_args_t args_so_far;
1202 tree fn, arg;
1203
1204 fn = emit_block_move_libcall_fn (false);
1205 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1206 args_so_far = pack_cumulative_args (&args_so_far_v);
1207
1208 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1209 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1210 {
1211 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1212 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1213 NULL_TREE, true);
1214 if (!tmp || !REG_P (tmp))
1215 return false;
1216 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1217 return false;
1218 targetm.calls.function_arg_advance (args_so_far, mode,
1219 NULL_TREE, true);
1220 }
1221 }
1222 return true;
1223 }
1224
1225 /* A subroutine of emit_block_move. Expand a movmem pattern;
1226 return true if successful. */
1227
1228 static bool
1229 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1230 unsigned int expected_align, HOST_WIDE_INT expected_size,
1231 unsigned HOST_WIDE_INT min_size,
1232 unsigned HOST_WIDE_INT max_size,
1233 unsigned HOST_WIDE_INT probable_max_size)
1234 {
1235 int save_volatile_ok = volatile_ok;
1236 machine_mode mode;
1237
1238 if (expected_align < align)
1239 expected_align = align;
1240 if (expected_size != -1)
1241 {
1242 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1243 expected_size = probable_max_size;
1244 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1245 expected_size = min_size;
1246 }
1247
1248 /* Since this is a move insn, we don't care about volatility. */
1249 volatile_ok = 1;
1250
1251 /* Try the most limited insn first, because there's no point
1252 including more than one in the machine description unless
1253 the more limited one has some advantage. */
1254
1255 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1256 mode = GET_MODE_WIDER_MODE (mode))
1257 {
1258 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1259
1260 if (code != CODE_FOR_nothing
1261 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1262 here because if SIZE is less than the mode mask, as it is
1263 returned by the macro, it will definitely be less than the
1264 actual mode mask. Since SIZE is within the Pmode address
1265 space, we limit MODE to Pmode. */
1266 && ((CONST_INT_P (size)
1267 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1268 <= (GET_MODE_MASK (mode) >> 1)))
1269 || max_size <= (GET_MODE_MASK (mode) >> 1)
1270 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1271 {
1272 struct expand_operand ops[9];
1273 unsigned int nops;
1274
1275 /* ??? When called via emit_block_move_for_call, it'd be
1276 nice if there were some way to inform the backend, so
1277 that it doesn't fail the expansion because it thinks
1278 emitting the libcall would be more efficient. */
1279 nops = insn_data[(int) code].n_generator_args;
1280 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1281
1282 create_fixed_operand (&ops[0], x);
1283 create_fixed_operand (&ops[1], y);
1284 /* The check above guarantees that this size conversion is valid. */
1285 create_convert_operand_to (&ops[2], size, mode, true);
1286 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1287 if (nops >= 6)
1288 {
1289 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1290 create_integer_operand (&ops[5], expected_size);
1291 }
1292 if (nops >= 8)
1293 {
1294 create_integer_operand (&ops[6], min_size);
1295 /* If we cannot represent the maximal size,
1296 make the parameter NULL. */
1297 if ((HOST_WIDE_INT) max_size != -1)
1298 create_integer_operand (&ops[7], max_size);
1299 else
1300 create_fixed_operand (&ops[7], NULL);
1301 }
1302 if (nops == 9)
1303 {
1304 /* If we cannot represent the maximal size,
1305 make the parameter NULL. */
1306 if ((HOST_WIDE_INT) probable_max_size != -1)
1307 create_integer_operand (&ops[8], probable_max_size);
1308 else
1309 create_fixed_operand (&ops[8], NULL);
1310 }
1311 if (maybe_expand_insn (code, nops, ops))
1312 {
1313 volatile_ok = save_volatile_ok;
1314 return true;
1315 }
1316 }
1317 }
1318
1319 volatile_ok = save_volatile_ok;
1320 return false;
1321 }
1322
1323 /* A subroutine of emit_block_move. Expand a call to memcpy.
1324 Return the return value from memcpy, 0 otherwise. */
1325
1326 rtx
1327 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1328 {
1329 rtx dst_addr, src_addr;
1330 tree call_expr, fn, src_tree, dst_tree, size_tree;
1331 machine_mode size_mode;
1332 rtx retval;
1333
1334 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1335 pseudos. We can then place those new pseudos into a VAR_DECL and
1336 use them later. */
1337
1338 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1339 src_addr = copy_addr_to_reg (XEXP (src, 0));
1340
1341 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1342 src_addr = convert_memory_address (ptr_mode, src_addr);
1343
1344 dst_tree = make_tree (ptr_type_node, dst_addr);
1345 src_tree = make_tree (ptr_type_node, src_addr);
1346
1347 size_mode = TYPE_MODE (sizetype);
1348
1349 size = convert_to_mode (size_mode, size, 1);
1350 size = copy_to_mode_reg (size_mode, size);
1351
1352 /* It is incorrect to use the libcall calling conventions to call
1353 memcpy in this context. This could be a user call to memcpy and
1354 the user may wish to examine the return value from memcpy. For
1355 targets where libcalls and normal calls have different conventions
1356 for returning pointers, we could end up generating incorrect code. */
1357
1358 size_tree = make_tree (sizetype, size);
1359
1360 fn = emit_block_move_libcall_fn (true);
1361 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1362 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1363
1364 retval = expand_normal (call_expr);
1365
1366 return retval;
1367 }
1368
1369 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1370 for the function we use for block copies. */
1371
1372 static GTY(()) tree block_move_fn;
1373
1374 void
1375 init_block_move_fn (const char *asmspec)
1376 {
1377 if (!block_move_fn)
1378 {
1379 tree args, fn, attrs, attr_args;
1380
1381 fn = get_identifier ("memcpy");
1382 args = build_function_type_list (ptr_type_node, ptr_type_node,
1383 const_ptr_type_node, sizetype,
1384 NULL_TREE);
1385
1386 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1387 DECL_EXTERNAL (fn) = 1;
1388 TREE_PUBLIC (fn) = 1;
1389 DECL_ARTIFICIAL (fn) = 1;
1390 TREE_NOTHROW (fn) = 1;
1391 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1392 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1393
1394 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1395 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1396
1397 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1398
1399 block_move_fn = fn;
1400 }
1401
1402 if (asmspec)
1403 set_user_assembler_name (block_move_fn, asmspec);
1404 }
1405
1406 static tree
1407 emit_block_move_libcall_fn (int for_call)
1408 {
1409 static bool emitted_extern;
1410
1411 if (!block_move_fn)
1412 init_block_move_fn (NULL);
1413
1414 if (for_call && !emitted_extern)
1415 {
1416 emitted_extern = true;
1417 make_decl_rtl (block_move_fn);
1418 }
1419
1420 return block_move_fn;
1421 }
1422
1423 /* A subroutine of emit_block_move. Copy the data via an explicit
1424 loop. This is used only when libcalls are forbidden. */
1425 /* ??? It'd be nice to copy in hunks larger than QImode. */
1426
1427 static void
1428 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1429 unsigned int align ATTRIBUTE_UNUSED)
1430 {
1431 rtx_code_label *cmp_label, *top_label;
1432 rtx iter, x_addr, y_addr, tmp;
1433 machine_mode x_addr_mode = get_address_mode (x);
1434 machine_mode y_addr_mode = get_address_mode (y);
1435 machine_mode iter_mode;
1436
1437 iter_mode = GET_MODE (size);
1438 if (iter_mode == VOIDmode)
1439 iter_mode = word_mode;
1440
1441 top_label = gen_label_rtx ();
1442 cmp_label = gen_label_rtx ();
1443 iter = gen_reg_rtx (iter_mode);
1444
1445 emit_move_insn (iter, const0_rtx);
1446
1447 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1448 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1449 do_pending_stack_adjust ();
1450
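/* The generated code is essentially:
     iter = 0; goto cmp;
   top:
     x[iter] = y[iter]; iter = iter + 1;
   cmp:
     if (iter < size) goto top;  */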
1451 emit_jump (cmp_label);
1452 emit_label (top_label);
1453
1454 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1455 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1456
1457 if (x_addr_mode != y_addr_mode)
1458 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1459 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1460
1461 x = change_address (x, QImode, x_addr);
1462 y = change_address (y, QImode, y_addr);
1463
1464 emit_move_insn (x, y);
1465
1466 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1467 true, OPTAB_LIB_WIDEN);
1468 if (tmp != iter)
1469 emit_move_insn (iter, tmp);
1470
1471 emit_label (cmp_label);
1472
1473 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1474 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1475 }
1476 \f
1477 /* Copy all or part of a value X into registers starting at REGNO.
1478 The number of registers to be filled is NREGS. */
1479
1480 void
1481 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1482 {
1483 if (nregs == 0)
1484 return;
1485
1486 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1487 x = validize_mem (force_const_mem (mode, x));
1488
1489 /* See if the machine can do this with a load multiple insn. */
1490 if (targetm.have_load_multiple ())
1491 {
1492 rtx_insn *last = get_last_insn ();
1493 rtx first = gen_rtx_REG (word_mode, regno);
1494 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1495 GEN_INT (nregs)))
1496 {
1497 emit_insn (pat);
1498 return;
1499 }
1500 else
1501 delete_insns_since (last);
1502 }
1503
1504 for (int i = 0; i < nregs; i++)
1505 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1506 operand_subword_force (x, i, mode));
1507 }
1508
1509 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1510 The number of registers to be filled is NREGS. */
1511
1512 void
1513 move_block_from_reg (int regno, rtx x, int nregs)
1514 {
1515 if (nregs == 0)
1516 return;
1517
1518 /* See if the machine can do this with a store multiple insn. */
1519 if (targetm.have_store_multiple ())
1520 {
1521 rtx_insn *last = get_last_insn ();
1522 rtx first = gen_rtx_REG (word_mode, regno);
1523 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1524 GEN_INT (nregs)))
1525 {
1526 emit_insn (pat);
1527 return;
1528 }
1529 else
1530 delete_insns_since (last);
1531 }
1532
1533 for (int i = 0; i < nregs; i++)
1534 {
1535 rtx tem = operand_subword (x, i, 1, BLKmode);
1536
1537 gcc_assert (tem);
1538
1539 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1540 }
1541 }
1542
1543 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1544 ORIG, where ORIG is a non-consecutive group of registers represented by
1545 a PARALLEL. The clone is identical to the original except in that the
1546 original set of registers is replaced by a new set of pseudo registers.
1547 The new set has the same modes as the original set. */
1548
1549 rtx
1550 gen_group_rtx (rtx orig)
1551 {
1552 int i, length;
1553 rtx *tmps;
1554
1555 gcc_assert (GET_CODE (orig) == PARALLEL);
1556
1557 length = XVECLEN (orig, 0);
1558 tmps = XALLOCAVEC (rtx, length);
1559
1560 /* Skip a NULL entry in first slot. */
1561 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1562
1563 if (i)
1564 tmps[0] = 0;
1565
1566 for (; i < length; i++)
1567 {
1568 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1569 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1570
1571 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1572 }
1573
1574 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1575 }
1576
1577 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1578 except that values are placed in TMPS[i], and must later be moved
1579 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
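/* DST has the form (parallel [(expr_list (reg) (const_int byte-offset)) ...]),
   possibly with a null first entry when the value is split between the
   stack and registers.  */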
1580
1581 static void
1582 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1583 {
1584 rtx src;
1585 int start, i;
1586 machine_mode m = GET_MODE (orig_src);
1587
1588 gcc_assert (GET_CODE (dst) == PARALLEL);
1589
1590 if (m != VOIDmode
1591 && !SCALAR_INT_MODE_P (m)
1592 && !MEM_P (orig_src)
1593 && GET_CODE (orig_src) != CONCAT)
1594 {
1595 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1596 if (imode == BLKmode)
1597 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1598 else
1599 src = gen_reg_rtx (imode);
1600 if (imode != BLKmode)
1601 src = gen_lowpart (GET_MODE (orig_src), src);
1602 emit_move_insn (src, orig_src);
1603 /* ...and back again. */
1604 if (imode != BLKmode)
1605 src = gen_lowpart (imode, src);
1606 emit_group_load_1 (tmps, dst, src, type, ssize);
1607 return;
1608 }
1609
1610 /* Check for a NULL entry, used to indicate that the parameter goes
1611 both on the stack and in registers. */
1612 if (XEXP (XVECEXP (dst, 0, 0), 0))
1613 start = 0;
1614 else
1615 start = 1;
1616
1617 /* Process the pieces. */
1618 for (i = start; i < XVECLEN (dst, 0); i++)
1619 {
1620 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1621 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1622 unsigned int bytelen = GET_MODE_SIZE (mode);
1623 int shift = 0;
1624
1625 /* Handle trailing fragments that run over the size of the struct. */
1626 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1627 {
1628 /* Arrange to shift the fragment to where it belongs.
1629 extract_bit_field loads to the lsb of the reg. */
1630 if (
1631 #ifdef BLOCK_REG_PADDING
1632 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1633 == (BYTES_BIG_ENDIAN ? upward : downward)
1634 #else
1635 BYTES_BIG_ENDIAN
1636 #endif
1637 )
1638 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1639 bytelen = ssize - bytepos;
1640 gcc_assert (bytelen > 0);
1641 }
1642
1643 /* If we won't be loading directly from memory, protect the real source
1644 from strange tricks we might play; but make sure that the source can
1645 be loaded directly into the destination. */
1646 src = orig_src;
1647 if (!MEM_P (orig_src)
1648 && (!CONSTANT_P (orig_src)
1649 || (GET_MODE (orig_src) != mode
1650 && GET_MODE (orig_src) != VOIDmode)))
1651 {
1652 if (GET_MODE (orig_src) == VOIDmode)
1653 src = gen_reg_rtx (mode);
1654 else
1655 src = gen_reg_rtx (GET_MODE (orig_src));
1656
1657 emit_move_insn (src, orig_src);
1658 }
1659
1660 /* Optimize the access just a bit. */
1661 if (MEM_P (src)
1662 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1663 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1664 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1665 && bytelen == GET_MODE_SIZE (mode))
1666 {
1667 tmps[i] = gen_reg_rtx (mode);
1668 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1669 }
1670 else if (COMPLEX_MODE_P (mode)
1671 && GET_MODE (src) == mode
1672 && bytelen == GET_MODE_SIZE (mode))
1673 /* Let emit_move_complex do the bulk of the work. */
1674 tmps[i] = src;
1675 else if (GET_CODE (src) == CONCAT)
1676 {
1677 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1678 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1679
1680 if ((bytepos == 0 && bytelen == slen0)
1681 || (bytepos != 0 && bytepos + bytelen <= slen))
1682 {
1683 /* The following assumes that the concatenated objects all
1684 have the same size. In this case, a simple calculation
1685 can be used to determine the object and the bit field
1686 to be extracted. */
1687 tmps[i] = XEXP (src, bytepos / slen0);
1688 if (! CONSTANT_P (tmps[i])
1689 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1690 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1691 (bytepos % slen0) * BITS_PER_UNIT,
1692 1, NULL_RTX, mode, mode);
1693 }
1694 else
1695 {
1696 rtx mem;
1697
1698 gcc_assert (!bytepos);
1699 mem = assign_stack_temp (GET_MODE (src), slen);
1700 emit_move_insn (mem, src);
1701 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1702 0, 1, NULL_RTX, mode, mode);
1703 }
1704 }
1705 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1706 SIMD register, which is currently broken. Until we get GCC
1707 to emit proper RTL for these cases, let's dump to memory. */
1708 else if (VECTOR_MODE_P (GET_MODE (dst))
1709 && REG_P (src))
1710 {
1711 int slen = GET_MODE_SIZE (GET_MODE (src));
1712 rtx mem;
1713
1714 mem = assign_stack_temp (GET_MODE (src), slen);
1715 emit_move_insn (mem, src);
1716 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1717 }
1718 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1719 && XVECLEN (dst, 0) > 1)
1720 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1721 else if (CONSTANT_P (src))
1722 {
1723 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1724
1725 if (len == ssize)
1726 tmps[i] = src;
1727 else
1728 {
1729 rtx first, second;
1730
1731 /* TODO: const_wide_int can have sizes other than this... */
1732 gcc_assert (2 * len == ssize);
1733 split_double (src, &first, &second);
1734 if (i)
1735 tmps[i] = second;
1736 else
1737 tmps[i] = first;
1738 }
1739 }
1740 else if (REG_P (src) && GET_MODE (src) == mode)
1741 tmps[i] = src;
1742 else
1743 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1744 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1745 mode, mode);
1746
1747 if (shift)
1748 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1749 shift, tmps[i], 0);
1750 }
1751 }
1752
1753 /* Emit code to move a block SRC of type TYPE to a block DST,
1754 where DST is non-consecutive registers represented by a PARALLEL.
1755 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1756 if not known. */
1757
1758 void
1759 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1760 {
1761 rtx *tmps;
1762 int i;
1763
1764 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1765 emit_group_load_1 (tmps, dst, src, type, ssize);
1766
1767 /* Copy the extracted pieces into the proper (probable) hard regs. */
1768 for (i = 0; i < XVECLEN (dst, 0); i++)
1769 {
1770 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1771 if (d == NULL)
1772 continue;
1773 emit_move_insn (d, tmps[i]);
1774 }
1775 }
1776
1777 /* Similar, but load SRC into new pseudos in a format that looks like
1778 PARALLEL. This can later be fed to emit_group_move to get things
1779 in the right place. */
1780
1781 rtx
1782 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1783 {
1784 rtvec vec;
1785 int i;
1786
1787 vec = rtvec_alloc (XVECLEN (parallel, 0));
1788 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1789
1790 /* Convert the vector to look just like the original PARALLEL, except
1791 with the computed values. */
1792 for (i = 0; i < XVECLEN (parallel, 0); i++)
1793 {
1794 rtx e = XVECEXP (parallel, 0, i);
1795 rtx d = XEXP (e, 0);
1796
1797 if (d)
1798 {
1799 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1800 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1801 }
1802 RTVEC_ELT (vec, i) = e;
1803 }
1804
1805 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1806 }
1807
1808 /* Emit code to move a block SRC to block DST, where SRC and DST are
1809 non-consecutive groups of registers, each represented by a PARALLEL. */
1810
1811 void
1812 emit_group_move (rtx dst, rtx src)
1813 {
1814 int i;
1815
1816 gcc_assert (GET_CODE (src) == PARALLEL
1817 && GET_CODE (dst) == PARALLEL
1818 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1819
1820 /* Skip first entry if NULL. */
1821 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1822 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1823 XEXP (XVECEXP (src, 0, i), 0));
1824 }
1825
1826 /* Move a group of registers represented by a PARALLEL into pseudos. */
1827
1828 rtx
1829 emit_group_move_into_temps (rtx src)
1830 {
1831 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1832 int i;
1833
1834 for (i = 0; i < XVECLEN (src, 0); i++)
1835 {
1836 rtx e = XVECEXP (src, 0, i);
1837 rtx d = XEXP (e, 0);
1838
1839 if (d)
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1841 RTVEC_ELT (vec, i) = e;
1842 }
1843
1844 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1845 }
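
/* Illustrative sketch (hypothetical helper and argument names): copying
   a group of hard registers through pseudos, e.g. to keep their values
   available across intervening code, and moving them back afterwards.  */
#if 0
static void
example_group_round_trip (rtx hard_parallel)
{
  /* Copy each hard register described by HARD_PARALLEL into a fresh
     pseudo...  */
  rtx temps = emit_group_move_into_temps (hard_parallel);
  /* ...and later move the pseudos back into the hard registers.  */
  emit_group_move (hard_parallel, temps);
}
#endif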
1846
1847 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1848 where SRC is non-consecutive registers represented by a PARALLEL.
1849 SSIZE represents the total size of block ORIG_DST, or -1 if not
1850 known. */
1851
1852 void
1853 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1854 {
1855 rtx *tmps, dst;
1856 int start, finish, i;
1857 machine_mode m = GET_MODE (orig_dst);
1858
1859 gcc_assert (GET_CODE (src) == PARALLEL);
1860
1861 if (!SCALAR_INT_MODE_P (m)
1862 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1863 {
1864 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1865 if (imode == BLKmode)
1866 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1867 else
1868 dst = gen_reg_rtx (imode);
1869 emit_group_store (dst, src, type, ssize);
1870 if (imode != BLKmode)
1871 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1872 emit_move_insn (orig_dst, dst);
1873 return;
1874 }
1875
1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
1878 if (XEXP (XVECEXP (src, 0, 0), 0))
1879 start = 0;
1880 else
1881 start = 1;
1882 finish = XVECLEN (src, 0);
1883
1884 tmps = XALLOCAVEC (rtx, finish);
1885
1886 /* Copy the (probable) hard regs into pseudos. */
1887 for (i = start; i < finish; i++)
1888 {
1889 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1890 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1891 {
1892 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1893 emit_move_insn (tmps[i], reg);
1894 }
1895 else
1896 tmps[i] = reg;
1897 }
1898
1899 /* If we won't be storing directly into memory, protect the real destination
1900 from strange tricks we might play. */
1901 dst = orig_dst;
1902 if (GET_CODE (dst) == PARALLEL)
1903 {
1904 rtx temp;
1905
1906 /* We can get a PARALLEL dst if there is a conditional expression in
1907 a return statement. In that case, the dst and src are the same,
1908 so no action is necessary. */
1909 if (rtx_equal_p (dst, src))
1910 return;
1911
1912 /* It is unclear if we can ever reach here, but we may as well handle
1913 it. Allocate a temporary, and split this into a store/load to/from
1914 the temporary. */
1915 temp = assign_stack_temp (GET_MODE (dst), ssize);
1916 emit_group_store (temp, src, type, ssize);
1917 emit_group_load (dst, temp, type, ssize);
1918 return;
1919 }
1920 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1921 {
1922 machine_mode outer = GET_MODE (dst);
1923 machine_mode inner;
1924 HOST_WIDE_INT bytepos;
1925 bool done = false;
1926 rtx temp;
1927
1928 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1929 dst = gen_reg_rtx (outer);
1930
1931 /* Make life a bit easier for combine. */
1932 /* If the first element of the vector is the low part
1933 of the destination mode, use a paradoxical subreg to
1934 initialize the destination. */
1935 if (start < finish)
1936 {
1937 inner = GET_MODE (tmps[start]);
1938 bytepos = subreg_lowpart_offset (inner, outer);
1939 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1940 {
1941 temp = simplify_gen_subreg (outer, tmps[start],
1942 inner, 0);
1943 if (temp)
1944 {
1945 emit_move_insn (dst, temp);
1946 done = true;
1947 start++;
1948 }
1949 }
1950 }
1951
1952 /* If the first element wasn't the low part, try the last. */
1953 if (!done
1954 && start < finish - 1)
1955 {
1956 inner = GET_MODE (tmps[finish - 1]);
1957 bytepos = subreg_lowpart_offset (inner, outer);
1958 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1959 {
1960 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1961 inner, 0);
1962 if (temp)
1963 {
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 finish--;
1967 }
1968 }
1969 }
1970
1971 /* Otherwise, simply initialize the result to zero. */
1972 if (!done)
1973 emit_move_insn (dst, CONST0_RTX (outer));
1974 }
1975
1976 /* Process the pieces. */
1977 for (i = start; i < finish; i++)
1978 {
1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1980 machine_mode mode = GET_MODE (tmps[i]);
1981 unsigned int bytelen = GET_MODE_SIZE (mode);
1982 unsigned int adj_bytelen;
1983 rtx dest = dst;
1984
1985 /* Handle trailing fragments that run over the size of the struct. */
1986 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1987 adj_bytelen = ssize - bytepos;
1988 else
1989 adj_bytelen = bytelen;
1990
1991 if (GET_CODE (dst) == CONCAT)
1992 {
1993 if (bytepos + adj_bytelen
1994 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1995 dest = XEXP (dst, 0);
1996 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1997 {
1998 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1999 dest = XEXP (dst, 1);
2000 }
2001 else
2002 {
2003 machine_mode dest_mode = GET_MODE (dest);
2004 machine_mode tmp_mode = GET_MODE (tmps[i]);
2005
2006 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2007
2008 if (GET_MODE_ALIGNMENT (dest_mode)
2009 >= GET_MODE_ALIGNMENT (tmp_mode))
2010 {
2011 dest = assign_stack_temp (dest_mode,
2012 GET_MODE_SIZE (dest_mode));
2013 emit_move_insn (adjust_address (dest,
2014 tmp_mode,
2015 bytepos),
2016 tmps[i]);
2017 dst = dest;
2018 }
2019 else
2020 {
2021 dest = assign_stack_temp (tmp_mode,
2022 GET_MODE_SIZE (tmp_mode));
2023 emit_move_insn (dest, tmps[i]);
2024 dst = adjust_address (dest, dest_mode, bytepos);
2025 }
2026 break;
2027 }
2028 }
2029
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2032 {
2033 /* store_bit_field always takes its value from the lsb.
2034 Move the fragment to the lsb if it's not already there. */
2035 if (
2036 #ifdef BLOCK_REG_PADDING
2037 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2038 == (BYTES_BIG_ENDIAN ? upward : downward)
2039 #else
2040 BYTES_BIG_ENDIAN
2041 #endif
2042 )
2043 {
2044 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2045 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2046 shift, tmps[i], 0);
2047 }
2048
2049 /* Make sure not to write past the end of the struct. */
2050 store_bit_field (dest,
2051 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2052 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2053 VOIDmode, tmps[i]);
2054 }
2055
2056 /* Optimize the access just a bit. */
2057 else if (MEM_P (dest)
2058 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2059 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2060 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2061 && bytelen == GET_MODE_SIZE (mode))
2062 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2063
2064 else
2065 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2066 0, 0, mode, tmps[i]);
2067 }
2068
2069 /* Copy from the pseudo into the (probable) hard reg. */
2070 if (orig_dst != dst)
2071 emit_move_insn (orig_dst, dst);
2072 }
2073
2074 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2075 of the value stored in X. */
2076
2077 rtx
2078 maybe_emit_group_store (rtx x, tree type)
2079 {
2080 machine_mode mode = TYPE_MODE (type);
2081 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2082 if (GET_CODE (x) == PARALLEL)
2083 {
2084 rtx result = gen_reg_rtx (mode);
2085 emit_group_store (result, x, type, int_size_in_bytes (type));
2086 return result;
2087 }
2088 return x;
2089 }
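
/* Illustrative sketch (hypothetical names): normalizing a call return
   value X that may or may not be a PARALLEL before storing it, assuming
   TARGET already has mode TYPE_MODE (TYPE).  */
#if 0
static void
example_store_return_value (rtx target, rtx x, tree type)
{
  /* If X is a PARALLEL this assembles it into a single pseudo;
     otherwise X is returned unchanged.  */
  x = maybe_emit_group_store (x, type);
  emit_move_insn (target, x);
}
#endif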
2090
2091 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2092
2093 This is used on targets that return BLKmode values in registers. */
2094
2095 void
2096 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2097 {
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2102 machine_mode mode = GET_MODE (srcreg);
2103 machine_mode tmode = GET_MODE (target);
2104 machine_mode copy_mode;
2105
2106 /* BLKmode registers created in the back-end shouldn't have survived. */
2107 gcc_assert (mode != BLKmode);
2108
2109 /* If the structure doesn't take up a whole number of words, see whether
2110 SRCREG is padded on the left or on the right. If it's on the left,
2111 set PADDING_CORRECTION to the number of bits to skip.
2112
2113 In most ABIs, the structure will be returned at the least significant end of
2114 the register, which translates to right padding on little-endian
2115 targets and left padding on big-endian targets. The opposite
2116 holds if the structure is returned at the most significant
2117 end of the register. */
2118 if (bytes % UNITS_PER_WORD != 0
2119 && (targetm.calls.return_in_msb (type)
2120 ? !BYTES_BIG_ENDIAN
2121 : BYTES_BIG_ENDIAN))
2122 padding_correction
2123 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2124
2125 /* We can use a single move if we have an exact mode for the size. */
2126 else if (MEM_P (target)
2127 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2128 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2129 && bytes == GET_MODE_SIZE (mode))
2130 {
2131 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2132 return;
2133 }
2134
2135 /* And if we additionally have the same mode for a register. */
2136 else if (REG_P (target)
2137 && GET_MODE (target) == mode
2138 && bytes == GET_MODE_SIZE (mode))
2139 {
2140 emit_move_insn (target, srcreg);
2141 return;
2142 }
2143
2144 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2145 into a new pseudo which is a full word. */
2146 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2147 {
2148 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2149 mode = word_mode;
2150 }
2151
2152 /* Copy the structure BITSIZE bits at a time. If the target lives in
2153 memory, take care of not reading/writing past its end by selecting
2154 a copy mode suited to BITSIZE. This should always be possible given
2155 how it is computed.
2156
2157 If the target lives in a register, make sure not to select a copy mode
2158 larger than the mode of the register.
2159
2160 We could probably emit more efficient code for machines which do not use
2161 strict alignment, but it doesn't seem worth the effort at the current
2162 time. */
2163
2164 copy_mode = word_mode;
2165 if (MEM_P (target))
2166 {
2167 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2168 if (mem_mode != BLKmode)
2169 copy_mode = mem_mode;
2170 }
2171 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2172 copy_mode = tmode;
2173
2174 for (bitpos = 0, xbitpos = padding_correction;
2175 bitpos < bytes * BITS_PER_UNIT;
2176 bitpos += bitsize, xbitpos += bitsize)
2177 {
2178 /* We need a new source operand each time xbitpos is on a
2179 word boundary and when xbitpos == padding_correction
2180 (the first time through). */
2181 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2182 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2183
2184 /* We need a new destination operand each time bitpos is on
2185 a word boundary. */
2186 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2187 dst = target;
2188 else if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2190
2191 /* Use xbitpos for the source extraction (right justified) and
2192 bitpos for the destination store (left justified). */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, copy_mode, copy_mode));
2197 }
2198 }
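
/* Illustrative sketch (hypothetical names): spilling a BLKmode value
   that was returned in a hard register back into a stack slot, assuming
   TYPE has a fixed size.  */
#if 0
static void
example_copy_blkmode_result (tree type, rtx hard_return_reg)
{
  rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));
  copy_blkmode_from_reg (slot, hard_return_reg, type);
}
#endif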
2199
2200 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2201 register if it contains any data, otherwise return null.
2202
2203 This is used on targets that return BLKmode values in registers. */
2204
2205 rtx
2206 copy_blkmode_to_reg (machine_mode mode, tree src)
2207 {
2208 int i, n_regs;
2209 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2210 unsigned int bitsize;
2211 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2212 machine_mode dst_mode;
2213
2214 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2215
2216 x = expand_normal (src);
2217
2218 bytes = int_size_in_bytes (TREE_TYPE (src));
2219 if (bytes == 0)
2220 return NULL_RTX;
2221
2222 /* If the structure doesn't take up a whole number of words, see
2223 whether the register value should be padded on the left or on
2224 the right. Set PADDING_CORRECTION to the number of padding
2225 bits needed on the left side.
2226
2227 In most ABIs, the structure will be returned at the least significant end of
2228 the register, which translates to right padding on little-endian
2229 targets and left padding on big-endian targets. The opposite
2230 holds if the structure is returned at the most significant
2231 end of the register. */
2232 if (bytes % UNITS_PER_WORD != 0
2233 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2234 ? !BYTES_BIG_ENDIAN
2235 : BYTES_BIG_ENDIAN))
2236 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2237 * BITS_PER_UNIT));
2238
2239 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2240 dst_words = XALLOCAVEC (rtx, n_regs);
2241 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2242
2243 /* Copy the structure BITSIZE bits at a time. */
2244 for (bitpos = 0, xbitpos = padding_correction;
2245 bitpos < bytes * BITS_PER_UNIT;
2246 bitpos += bitsize, xbitpos += bitsize)
2247 {
2248 /* We need a new destination pseudo each time xbitpos is
2249 on a word boundary and when xbitpos == padding_correction
2250 (the first time through). */
2251 if (xbitpos % BITS_PER_WORD == 0
2252 || xbitpos == padding_correction)
2253 {
2254 /* Generate an appropriate register. */
2255 dst_word = gen_reg_rtx (word_mode);
2256 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2257
2258 /* Clear the destination before we move anything into it. */
2259 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2260 }
2261
2262 /* We need a new source operand each time bitpos is on a word
2263 boundary. */
2264 if (bitpos % BITS_PER_WORD == 0)
2265 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2266
2267 /* Use bitpos for the source extraction (left justified) and
2268 xbitpos for the destination store (right justified). */
2269 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2270 0, 0, word_mode,
2271 extract_bit_field (src_word, bitsize,
2272 bitpos % BITS_PER_WORD, 1,
2273 NULL_RTX, word_mode, word_mode));
2274 }
2275
2276 if (mode == BLKmode)
2277 {
2278 /* Find the smallest integer mode large enough to hold the
2279 entire structure. */
2280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2281 mode != VOIDmode;
2282 mode = GET_MODE_WIDER_MODE (mode))
2283 /* Have we found a large enough mode? */
2284 if (GET_MODE_SIZE (mode) >= bytes)
2285 break;
2286
2287 /* A suitable mode should have been found. */
2288 gcc_assert (mode != VOIDmode);
2289 }
2290
2291 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2292 dst_mode = word_mode;
2293 else
2294 dst_mode = mode;
2295 dst = gen_reg_rtx (dst_mode);
2296
2297 for (i = 0; i < n_regs; i++)
2298 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2299
2300 if (mode != dst_mode)
2301 dst = gen_lowpart (mode, dst);
2302
2303 return dst;
2304 }
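
/* Illustrative sketch (hypothetical names): the reverse direction,
   materializing a BLKmode aggregate SRC_TREE in the register the ABI
   uses for returning it.  */
#if 0
static void
example_return_blkmode (tree src_tree, rtx return_reg)
{
  /* SRC_TREE must have BLKmode type; the result is NULL for an empty
     aggregate.  */
  rtx val = copy_blkmode_to_reg (GET_MODE (return_reg), src_tree);
  if (val)
    emit_move_insn (return_reg, val);
}
#endif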
2305
2306 /* Add a USE expression for REG to the (possibly empty) list pointed
2307 to by CALL_FUSAGE. REG must denote a hard register. */
2308
2309 void
2310 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2311 {
2312 gcc_assert (REG_P (reg));
2313
2314 if (!HARD_REGISTER_P (reg))
2315 return;
2316
2317 *call_fusage
2318 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2319 }
2320
2321 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2322 to by CALL_FUSAGE. REG must denote a hard register. */
2323
2324 void
2325 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2326 {
2327 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2328
2329 *call_fusage
2330 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2331 }
2332
2333 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2334 starting at REGNO. All of these registers must be hard registers. */
2335
2336 void
2337 use_regs (rtx *call_fusage, int regno, int nregs)
2338 {
2339 int i;
2340
2341 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2342
2343 for (i = 0; i < nregs; i++)
2344 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2345 }
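
/* Illustrative sketch (hypothetical register numbers): recording that a
   call uses a pair of consecutive argument registers, so that the setup
   moves feeding them are not regarded as dead.  */
#if 0
static void
example_record_call_usage (rtx *call_fusage)
{
  /* Mark hard registers 0 and 1 as used by the call.  */
  use_regs (call_fusage, 0, 2);
}
#endif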
2346
2347 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2348 PARALLEL REGS. This is for calls that pass values in multiple
2349 non-contiguous locations. The Irix 6 ABI has examples of this. */
2350
2351 void
2352 use_group_regs (rtx *call_fusage, rtx regs)
2353 {
2354 int i;
2355
2356 for (i = 0; i < XVECLEN (regs, 0); i++)
2357 {
2358 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2359
2360 /* A NULL entry means the parameter goes both on the stack and in
2361 registers. This can also be a MEM for targets that pass values
2362 partially on the stack and partially in registers. */
2363 if (reg != 0 && REG_P (reg))
2364 use_reg (call_fusage, reg);
2365 }
2366 }
2367
2368 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2369 assignment and the code of the expression on the RHS is CODE. Return
2370 NULL otherwise. */
2371
2372 static gimple
2373 get_def_for_expr (tree name, enum tree_code code)
2374 {
2375 gimple def_stmt;
2376
2377 if (TREE_CODE (name) != SSA_NAME)
2378 return NULL;
2379
2380 def_stmt = get_gimple_for_ssa_name (name);
2381 if (!def_stmt
2382 || gimple_assign_rhs_code (def_stmt) != code)
2383 return NULL;
2384
2385 return def_stmt;
2386 }
2387
2388 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2389 assignment and the class of the expression on the RHS is TCLASS. Return
2390 NULL otherwise. */
2391
2392 static gimple
2393 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2394 {
2395 gimple def_stmt;
2396
2397 if (TREE_CODE (name) != SSA_NAME)
2398 return NULL;
2399
2400 def_stmt = get_gimple_for_ssa_name (name);
2401 if (!def_stmt
2402 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2403 return NULL;
2404
2405 return def_stmt;
2406 }
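
/* Illustrative sketch (hypothetical helper name): the typical use of the
   helpers above is to peek at the SSA definition of an operand while
   expanding, e.g. to detect that an operand is fed by a multiplication.  */
#if 0
static bool
example_fed_by_mult_p (tree op)
{
  gimple def = get_def_for_expr (op, MULT_EXPR);
  return def != NULL;
}
#endif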
2407 \f
2408
2409 /* Determine whether the LEN bytes generated by CONSTFUN can be
2410 stored to memory using several move instructions. CONSTFUNDATA is
2411 a pointer which will be passed as an argument in every CONSTFUN call.
2412 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2413 a memset operation and false if it's a copy of a constant string.
2414 Return nonzero if a call to store_by_pieces should succeed. */
2415
2416 int
2417 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2418 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2419 void *constfundata, unsigned int align, bool memsetp)
2420 {
2421 unsigned HOST_WIDE_INT l;
2422 unsigned int max_size;
2423 HOST_WIDE_INT offset = 0;
2424 machine_mode mode;
2425 enum insn_code icode;
2426 int reverse;
2427 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2428 rtx cst ATTRIBUTE_UNUSED;
2429
2430 if (len == 0)
2431 return 1;
2432
2433 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2434 memsetp
2435 ? SET_BY_PIECES
2436 : STORE_BY_PIECES,
2437 optimize_insn_for_speed_p ()))
2438 return 0;
2439
2440 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2441
2442 /* We would first store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2444
2445 for (reverse = 0;
2446 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2447 reverse++)
2448 {
2449 l = len;
2450 max_size = STORE_MAX_PIECES + 1;
2451 while (max_size > 1 && l > 0)
2452 {
2453 mode = widest_int_mode_for_size (max_size);
2454
2455 if (mode == VOIDmode)
2456 break;
2457
2458 icode = optab_handler (mov_optab, mode);
2459 if (icode != CODE_FOR_nothing
2460 && align >= GET_MODE_ALIGNMENT (mode))
2461 {
2462 unsigned int size = GET_MODE_SIZE (mode);
2463
2464 while (l >= size)
2465 {
2466 if (reverse)
2467 offset -= size;
2468
2469 cst = (*constfun) (constfundata, offset, mode);
2470 if (!targetm.legitimate_constant_p (mode, cst))
2471 return 0;
2472
2473 if (!reverse)
2474 offset += size;
2475
2476 l -= size;
2477 }
2478 }
2479
2480 max_size = GET_MODE_SIZE (mode);
2481 }
2482
2483 /* The code above should have handled everything. */
2484 gcc_assert (!l);
2485 }
2486
2487 return 1;
2488 }
2489
2490 /* Generate several move instructions to store LEN bytes generated by
2491 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2492 pointer which will be passed as an argument in every CONSTFUN call.
2493 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2494 a memset operation and false if it's a copy of a constant string.
2495 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2496 mempcpy; and if ENDP is 2, return memory at the end minus one byte,
2497 a la stpcpy. */
2498
2499 rtx
2500 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2501 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2502 void *constfundata, unsigned int align, bool memsetp, int endp)
2503 {
2504 machine_mode to_addr_mode = get_address_mode (to);
2505 struct store_by_pieces_d data;
2506
2507 if (len == 0)
2508 {
2509 gcc_assert (endp != 2);
2510 return to;
2511 }
2512
2513 gcc_assert (targetm.use_by_pieces_infrastructure_p
2514 (len, align,
2515 memsetp
2516 ? SET_BY_PIECES
2517 : STORE_BY_PIECES,
2518 optimize_insn_for_speed_p ()));
2519
2520 data.constfun = constfun;
2521 data.constfundata = constfundata;
2522 data.len = len;
2523 data.to = to;
2524 store_by_pieces_1 (&data, align);
2525 if (endp)
2526 {
2527 rtx to1;
2528
2529 gcc_assert (!data.reverse);
2530 if (data.autinc_to)
2531 {
2532 if (endp == 2)
2533 {
2534 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2536 else
2537 data.to_addr = copy_to_mode_reg (to_addr_mode,
2538 plus_constant (to_addr_mode,
2539 data.to_addr,
2540 -1));
2541 }
2542 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2543 data.offset);
2544 }
2545 else
2546 {
2547 if (endp == 2)
2548 --data.offset;
2549 to1 = adjust_address (data.to, QImode, data.offset);
2550 }
2551 return to1;
2552 }
2553 else
2554 return data.to;
2555 }
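
/* Illustrative sketch (hypothetical helpers): a CONSTFUN that yields zero
   for every piece, comparable to clear_by_pieces_1 below, and a guarded
   call that stores 16 zero bytes into TO.  */
#if 0
static rtx
example_zero_piece (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_zero_16_bytes (rtx to)
{
  /* The can_store_by_pieces check also covers the assertion made by
     store_by_pieces itself.  */
  if (can_store_by_pieces (16, example_zero_piece, NULL, MEM_ALIGN (to), true))
    store_by_pieces (to, 16, example_zero_piece, NULL, MEM_ALIGN (to),
                     true, 0);
}
#endif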
2556
2557 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2558 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2559
2560 static void
2561 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2562 {
2563 struct store_by_pieces_d data;
2564
2565 if (len == 0)
2566 return;
2567
2568 data.constfun = clear_by_pieces_1;
2569 data.constfundata = NULL;
2570 data.len = len;
2571 data.to = to;
2572 store_by_pieces_1 (&data, align);
2573 }
2574
2575 /* Callback routine for clear_by_pieces.
2576 Return const0_rtx unconditionally. */
2577
2578 static rtx
2579 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2580 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2581 machine_mode mode ATTRIBUTE_UNUSED)
2582 {
2583 return const0_rtx;
2584 }
2585
2586 /* Subroutine of clear_by_pieces and store_by_pieces.
2587 Generate several move instructions to store LEN bytes of block TO. (A MEM
2588 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2589
2590 static void
2591 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2592 unsigned int align ATTRIBUTE_UNUSED)
2593 {
2594 machine_mode to_addr_mode = get_address_mode (data->to);
2595 rtx to_addr = XEXP (data->to, 0);
2596 unsigned int max_size = STORE_MAX_PIECES + 1;
2597 enum insn_code icode;
2598
2599 data->offset = 0;
2600 data->to_addr = to_addr;
2601 data->autinc_to
2602 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2603 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2604
2605 data->explicit_inc_to = 0;
2606 data->reverse
2607 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2608 if (data->reverse)
2609 data->offset = data->len;
2610
2611 /* If storing requires more than two move insns,
2612 copy addresses to registers (to make displacements shorter)
2613 and use post-increment if available. */
2614 if (!data->autinc_to
2615 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2616 {
2617 /* Determine the main mode we'll be using.
2618 MODE might not be used depending on the definitions of the
2619 USE_* macros below. */
2620 machine_mode mode ATTRIBUTE_UNUSED
2621 = widest_int_mode_for_size (max_size);
2622
2623 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2624 {
2625 data->to_addr = copy_to_mode_reg (to_addr_mode,
2626 plus_constant (to_addr_mode,
2627 to_addr,
2628 data->len));
2629 data->autinc_to = 1;
2630 data->explicit_inc_to = -1;
2631 }
2632
2633 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2634 && ! data->autinc_to)
2635 {
2636 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2637 data->autinc_to = 1;
2638 data->explicit_inc_to = 1;
2639 }
2640
2641 if ( !data->autinc_to && CONSTANT_P (to_addr))
2642 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2643 }
2644
2645 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2646
2647 /* First store what we can in the largest integer mode, then go to
2648 successively smaller modes. */
2649
2650 while (max_size > 1 && data->len > 0)
2651 {
2652 machine_mode mode = widest_int_mode_for_size (max_size);
2653
2654 if (mode == VOIDmode)
2655 break;
2656
2657 icode = optab_handler (mov_optab, mode);
2658 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2659 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2660
2661 max_size = GET_MODE_SIZE (mode);
2662 }
2663
2664 /* The code above should have handled everything. */
2665 gcc_assert (!data->len);
2666 }
2667
2668 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2669 with move instructions for mode MODE. GENFUN is the gen_... function
2670 to make a move insn for that mode. DATA has all the other info. */
2671
2672 static void
2673 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2674 struct store_by_pieces_d *data)
2675 {
2676 unsigned int size = GET_MODE_SIZE (mode);
2677 rtx to1, cst;
2678
2679 while (data->len >= size)
2680 {
2681 if (data->reverse)
2682 data->offset -= size;
2683
2684 if (data->autinc_to)
2685 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2686 data->offset);
2687 else
2688 to1 = adjust_address (data->to, mode, data->offset);
2689
2690 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2691 emit_insn (gen_add2_insn (data->to_addr,
2692 gen_int_mode (-(HOST_WIDE_INT) size,
2693 GET_MODE (data->to_addr))));
2694
2695 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2696 emit_insn ((*genfun) (to1, cst));
2697
2698 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2699 emit_insn (gen_add2_insn (data->to_addr,
2700 gen_int_mode (size,
2701 GET_MODE (data->to_addr))));
2702
2703 if (! data->reverse)
2704 data->offset += size;
2705
2706 data->len -= size;
2707 }
2708 }
2709 \f
2710 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2711 its length in bytes. */
2712
2713 rtx
2714 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2715 unsigned int expected_align, HOST_WIDE_INT expected_size,
2716 unsigned HOST_WIDE_INT min_size,
2717 unsigned HOST_WIDE_INT max_size,
2718 unsigned HOST_WIDE_INT probable_max_size)
2719 {
2720 machine_mode mode = GET_MODE (object);
2721 unsigned int align;
2722
2723 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2724
2725 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2726 just move a zero. Otherwise, do this a piece at a time. */
2727 if (mode != BLKmode
2728 && CONST_INT_P (size)
2729 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2730 {
2731 rtx zero = CONST0_RTX (mode);
2732 if (zero != NULL)
2733 {
2734 emit_move_insn (object, zero);
2735 return NULL;
2736 }
2737
2738 if (COMPLEX_MODE_P (mode))
2739 {
2740 zero = CONST0_RTX (GET_MODE_INNER (mode));
2741 if (zero != NULL)
2742 {
2743 write_complex_part (object, zero, 0);
2744 write_complex_part (object, zero, 1);
2745 return NULL;
2746 }
2747 }
2748 }
2749
2750 if (size == const0_rtx)
2751 return NULL;
2752
2753 align = MEM_ALIGN (object);
2754
2755 if (CONST_INT_P (size)
2756 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2757 CLEAR_BY_PIECES,
2758 optimize_insn_for_speed_p ()))
2759 clear_by_pieces (object, INTVAL (size), align);
2760 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2761 expected_align, expected_size,
2762 min_size, max_size, probable_max_size))
2763 ;
2764 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2765 return set_storage_via_libcall (object, size, const0_rtx,
2766 method == BLOCK_OP_TAILCALL);
2767 else
2768 gcc_unreachable ();
2769
2770 return NULL;
2771 }
2772
2773 rtx
2774 clear_storage (rtx object, rtx size, enum block_op_methods method)
2775 {
2776 unsigned HOST_WIDE_INT max, min = 0;
2777 if (GET_CODE (size) == CONST_INT)
2778 min = max = UINTVAL (size);
2779 else
2780 max = GET_MODE_MASK (GET_MODE (size));
2781 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2782 }
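
/* Illustrative sketch (hypothetical names): zeroing a BLKmode stack
   temporary of NBYTES bytes with the function above.  */
#if 0
static void
example_clear_temp (HOST_WIDE_INT nbytes)
{
  rtx mem = assign_stack_temp (BLKmode, nbytes);
  clear_storage (mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif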
2783
2784
2785 /* A subroutine of clear_storage. Expand a call to memset.
2786 Return the return value of memset, 0 otherwise. */
2787
2788 rtx
2789 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2790 {
2791 tree call_expr, fn, object_tree, size_tree, val_tree;
2792 machine_mode size_mode;
2793 rtx retval;
2794
2795 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2796 place those new pseudos into a VAR_DECL and use them later. */
2797
2798 object = copy_addr_to_reg (XEXP (object, 0));
2799
2800 size_mode = TYPE_MODE (sizetype);
2801 size = convert_to_mode (size_mode, size, 1);
2802 size = copy_to_mode_reg (size_mode, size);
2803
2804 /* It is incorrect to use the libcall calling conventions to call
2805 memset in this context. This could be a user call to memset and
2806 the user may wish to examine the return value from memset. For
2807 targets where libcalls and normal calls have different conventions
2808 for returning pointers, we could end up generating incorrect code. */
2809
2810 object_tree = make_tree (ptr_type_node, object);
2811 if (!CONST_INT_P (val))
2812 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2813 size_tree = make_tree (sizetype, size);
2814 val_tree = make_tree (integer_type_node, val);
2815
2816 fn = clear_storage_libcall_fn (true);
2817 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2818 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2819
2820 retval = expand_normal (call_expr);
2821
2822 return retval;
2823 }
2824
2825 /* A subroutine of set_storage_via_libcall. Create the tree node
2826 for the function we use for block clears. */
2827
2828 tree block_clear_fn;
2829
2830 void
2831 init_block_clear_fn (const char *asmspec)
2832 {
2833 if (!block_clear_fn)
2834 {
2835 tree fn, args;
2836
2837 fn = get_identifier ("memset");
2838 args = build_function_type_list (ptr_type_node, ptr_type_node,
2839 integer_type_node, sizetype,
2840 NULL_TREE);
2841
2842 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2843 DECL_EXTERNAL (fn) = 1;
2844 TREE_PUBLIC (fn) = 1;
2845 DECL_ARTIFICIAL (fn) = 1;
2846 TREE_NOTHROW (fn) = 1;
2847 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2848 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2849
2850 block_clear_fn = fn;
2851 }
2852
2853 if (asmspec)
2854 set_user_assembler_name (block_clear_fn, asmspec);
2855 }
2856
2857 static tree
2858 clear_storage_libcall_fn (int for_call)
2859 {
2860 static bool emitted_extern;
2861
2862 if (!block_clear_fn)
2863 init_block_clear_fn (NULL);
2864
2865 if (for_call && !emitted_extern)
2866 {
2867 emitted_extern = true;
2868 make_decl_rtl (block_clear_fn);
2869 }
2870
2871 return block_clear_fn;
2872 }
2873 \f
2874 /* Expand a setmem pattern; return true if successful. */
2875
2876 bool
2877 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2878 unsigned int expected_align, HOST_WIDE_INT expected_size,
2879 unsigned HOST_WIDE_INT min_size,
2880 unsigned HOST_WIDE_INT max_size,
2881 unsigned HOST_WIDE_INT probable_max_size)
2882 {
2883 /* Try the most limited insn first, because there's no point
2884 including more than one in the machine description unless
2885 the more limited one has some advantage. */
2886
2887 machine_mode mode;
2888
2889 if (expected_align < align)
2890 expected_align = align;
2891 if (expected_size != -1)
2892 {
2893 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2894 expected_size = max_size;
2895 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2896 expected_size = min_size;
2897 }
2898
2899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2900 mode = GET_MODE_WIDER_MODE (mode))
2901 {
2902 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2903
2904 if (code != CODE_FOR_nothing
2905 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2906 here because if SIZE is less than the mode mask, as it is
2907 returned by the macro, it will definitely be less than the
2908 actual mode mask. Since SIZE is within the Pmode address
2909 space, we limit MODE to Pmode. */
2910 && ((CONST_INT_P (size)
2911 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2912 <= (GET_MODE_MASK (mode) >> 1)))
2913 || max_size <= (GET_MODE_MASK (mode) >> 1)
2914 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2915 {
2916 struct expand_operand ops[9];
2917 unsigned int nops;
2918
2919 nops = insn_data[(int) code].n_generator_args;
2920 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2921
2922 create_fixed_operand (&ops[0], object);
2923 /* The check above guarantees that this size conversion is valid. */
2924 create_convert_operand_to (&ops[1], size, mode, true);
2925 create_convert_operand_from (&ops[2], val, byte_mode, true);
2926 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2927 if (nops >= 6)
2928 {
2929 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2930 create_integer_operand (&ops[5], expected_size);
2931 }
2932 if (nops >= 8)
2933 {
2934 create_integer_operand (&ops[6], min_size);
2935 /* If we cannot represent the maximal size,
2936 pass NULL for the parameter. */
2937 if ((HOST_WIDE_INT) max_size != -1)
2938 create_integer_operand (&ops[7], max_size);
2939 else
2940 create_fixed_operand (&ops[7], NULL);
2941 }
2942 if (nops == 9)
2943 {
2944 /* If we cannot represent the maximal size,
2945 pass NULL for the parameter. */
2946 if ((HOST_WIDE_INT) probable_max_size != -1)
2947 create_integer_operand (&ops[8], probable_max_size);
2948 else
2949 create_fixed_operand (&ops[8], NULL);
2950 }
2951 if (maybe_expand_insn (code, nops, ops))
2952 return true;
2953 }
2954 }
2955
2956 return false;
2957 }
2958
2959 \f
2960 /* Write to one of the components of the complex value CPLX. Write VAL to
2961 the real part if IMAG_P is false, and the imaginary part if it's true. */
2962
2963 void
2964 write_complex_part (rtx cplx, rtx val, bool imag_p)
2965 {
2966 machine_mode cmode;
2967 machine_mode imode;
2968 unsigned ibitsize;
2969
2970 if (GET_CODE (cplx) == CONCAT)
2971 {
2972 emit_move_insn (XEXP (cplx, imag_p), val);
2973 return;
2974 }
2975
2976 cmode = GET_MODE (cplx);
2977 imode = GET_MODE_INNER (cmode);
2978 ibitsize = GET_MODE_BITSIZE (imode);
2979
2980 /* For MEMs simplify_gen_subreg may generate an invalid new address
2981 because, e.g., the original address is considered mode-dependent
2982 by the target, which restricts simplify_subreg from invoking
2983 adjust_address_nv. Instead of preparing fallback support for an
2984 invalid address, we call adjust_address_nv directly. */
2985 if (MEM_P (cplx))
2986 {
2987 emit_move_insn (adjust_address_nv (cplx, imode,
2988 imag_p ? GET_MODE_SIZE (imode) : 0),
2989 val);
2990 return;
2991 }
2992
2993 /* If the sub-object is at least word sized, then we know that subregging
2994 will work. This special case is important, since store_bit_field
2995 wants to operate on integer modes, and there's rarely an OImode to
2996 correspond to TCmode. */
2997 if (ibitsize >= BITS_PER_WORD
2998 /* For hard regs we have exact predicates. Assume we can split
2999 the original object if it spans an even number of hard regs.
3000 This special case is important for SCmode on 64-bit platforms
3001 where the natural size of floating-point regs is 32-bit. */
3002 || (REG_P (cplx)
3003 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3004 && REG_NREGS (cplx) % 2 == 0))
3005 {
3006 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3007 imag_p ? GET_MODE_SIZE (imode) : 0);
3008 if (part)
3009 {
3010 emit_move_insn (part, val);
3011 return;
3012 }
3013 else
3014 /* simplify_gen_subreg may fail for sub-word MEMs. */
3015 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3016 }
3017
3018 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3019 }
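
/* Illustrative sketch (hypothetical names): zeroing both halves of a
   complex value, which is also how clear_storage_hints handles small
   COMPLEX_MODE_P objects above.  */
#if 0
static void
example_zero_complex (rtx cplx)
{
  rtx zero = CONST0_RTX (GET_MODE_INNER (GET_MODE (cplx)));
  write_complex_part (cplx, zero, false);  /* real part */
  write_complex_part (cplx, zero, true);   /* imaginary part */
}
#endif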
3020
3021 /* Extract one of the components of the complex value CPLX. Extract the
3022 real part if IMAG_P is false, and the imaginary part if it's true. */
3023
3024 static rtx
3025 read_complex_part (rtx cplx, bool imag_p)
3026 {
3027 machine_mode cmode, imode;
3028 unsigned ibitsize;
3029
3030 if (GET_CODE (cplx) == CONCAT)
3031 return XEXP (cplx, imag_p);
3032
3033 cmode = GET_MODE (cplx);
3034 imode = GET_MODE_INNER (cmode);
3035 ibitsize = GET_MODE_BITSIZE (imode);
3036
3037 /* Special case reads from complex constants that got spilled to memory. */
3038 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3039 {
3040 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3041 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3042 {
3043 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3044 if (CONSTANT_CLASS_P (part))
3045 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3046 }
3047 }
3048
3049 /* For MEMs simplify_gen_subreg may generate an invalid new address
3050 because, e.g., the original address is considered mode-dependent
3051 by the target, which restricts simplify_subreg from invoking
3052 adjust_address_nv. Instead of preparing fallback support for an
3053 invalid address, we call adjust_address_nv directly. */
3054 if (MEM_P (cplx))
3055 return adjust_address_nv (cplx, imode,
3056 imag_p ? GET_MODE_SIZE (imode) : 0);
3057
3058 /* If the sub-object is at least word sized, then we know that subregging
3059 will work. This special case is important, since extract_bit_field
3060 wants to operate on integer modes, and there's rarely an OImode to
3061 correspond to TCmode. */
3062 if (ibitsize >= BITS_PER_WORD
3063 /* For hard regs we have exact predicates. Assume we can split
3064 the original object if it spans an even number of hard regs.
3065 This special case is important for SCmode on 64-bit platforms
3066 where the natural size of floating-point regs is 32-bit. */
3067 || (REG_P (cplx)
3068 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3069 && REG_NREGS (cplx) % 2 == 0))
3070 {
3071 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3072 imag_p ? GET_MODE_SIZE (imode) : 0);
3073 if (ret)
3074 return ret;
3075 else
3076 /* simplify_gen_subreg may fail for sub-word MEMs. */
3077 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3078 }
3079
3080 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3081 true, NULL_RTX, imode, imode);
3082 }
3083 \f
3084 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3085 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3086 represented in NEW_MODE. If FORCE is true, this will never happen, as
3087 we'll force-create a SUBREG if needed. */
3088
3089 static rtx
3090 emit_move_change_mode (machine_mode new_mode,
3091 machine_mode old_mode, rtx x, bool force)
3092 {
3093 rtx ret;
3094
3095 if (push_operand (x, GET_MODE (x)))
3096 {
3097 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3098 MEM_COPY_ATTRIBUTES (ret, x);
3099 }
3100 else if (MEM_P (x))
3101 {
3102 /* We don't have to worry about changing the address since the
3103 size in bytes is supposed to be the same. */
3104 if (reload_in_progress)
3105 {
3106 /* Copy the MEM to change the mode and move any
3107 substitutions from the old MEM to the new one. */
3108 ret = adjust_address_nv (x, new_mode, 0);
3109 copy_replacements (x, ret);
3110 }
3111 else
3112 ret = adjust_address (x, new_mode, 0);
3113 }
3114 else
3115 {
3116 /* Note that we do want simplify_subreg's behavior of validating
3117 that the new mode is ok for a hard register. If we were to use
3118 simplify_gen_subreg, we would create the subreg, but would
3119 probably run into the target not being able to implement it. */
3120 /* Except, of course, when FORCE is true, in which case this is exactly
3121 what we want; it is needed for CCmodes on some targets. */
3122 if (force)
3123 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3124 else
3125 ret = simplify_subreg (new_mode, x, old_mode, 0);
3126 }
3127
3128 return ret;
3129 }
3130
3131 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3132 an integer mode of the same size as MODE. Returns the instruction
3133 emitted, or NULL if such a move could not be generated. */
3134
3135 static rtx_insn *
3136 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3137 {
3138 machine_mode imode;
3139 enum insn_code code;
3140
3141 /* There must exist a mode of the exact size we require. */
3142 imode = int_mode_for_mode (mode);
3143 if (imode == BLKmode)
3144 return NULL;
3145
3146 /* The target must support moves in this mode. */
3147 code = optab_handler (mov_optab, imode);
3148 if (code == CODE_FOR_nothing)
3149 return NULL;
3150
3151 x = emit_move_change_mode (imode, mode, x, force);
3152 if (x == NULL_RTX)
3153 return NULL;
3154 y = emit_move_change_mode (imode, mode, y, force);
3155 if (y == NULL_RTX)
3156 return NULL;
3157 return emit_insn (GEN_FCN (code) (x, y));
3158 }
3159
3160 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3161 Return an equivalent MEM that does not use an auto-increment. */
3162
3163 rtx
3164 emit_move_resolve_push (machine_mode mode, rtx x)
3165 {
3166 enum rtx_code code = GET_CODE (XEXP (x, 0));
3167 HOST_WIDE_INT adjust;
3168 rtx temp;
3169
3170 adjust = GET_MODE_SIZE (mode);
3171 #ifdef PUSH_ROUNDING
3172 adjust = PUSH_ROUNDING (adjust);
3173 #endif
3174 if (code == PRE_DEC || code == POST_DEC)
3175 adjust = -adjust;
3176 else if (code == PRE_MODIFY || code == POST_MODIFY)
3177 {
3178 rtx expr = XEXP (XEXP (x, 0), 1);
3179 HOST_WIDE_INT val;
3180
3181 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3182 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3183 val = INTVAL (XEXP (expr, 1));
3184 if (GET_CODE (expr) == MINUS)
3185 val = -val;
3186 gcc_assert (adjust == val || adjust == -val);
3187 adjust = val;
3188 }
3189
3190 /* Do not use anti_adjust_stack, since we don't want to update
3191 stack_pointer_delta. */
3192 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3193 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3194 0, OPTAB_LIB_WIDEN);
3195 if (temp != stack_pointer_rtx)
3196 emit_move_insn (stack_pointer_rtx, temp);
3197
3198 switch (code)
3199 {
3200 case PRE_INC:
3201 case PRE_DEC:
3202 case PRE_MODIFY:
3203 temp = stack_pointer_rtx;
3204 break;
3205 case POST_INC:
3206 case POST_DEC:
3207 case POST_MODIFY:
3208 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3209 break;
3210 default:
3211 gcc_unreachable ();
3212 }
3213
3214 return replace_equiv_address (x, temp);
3215 }
3216
3217 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3218 X is known to satisfy push_operand, and MODE is known to be complex.
3219 Returns the last instruction emitted. */
3220
3221 rtx_insn *
3222 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3223 {
3224 machine_mode submode = GET_MODE_INNER (mode);
3225 bool imag_first;
3226
3227 #ifdef PUSH_ROUNDING
3228 unsigned int submodesize = GET_MODE_SIZE (submode);
3229
3230 /* If we are pushing to the stack, but the size is smaller than what
3231 the machine actually pushes, we need to use move instructions. */
3232 if (PUSH_ROUNDING (submodesize) != submodesize)
3233 {
3234 x = emit_move_resolve_push (mode, x);
3235 return emit_move_insn (x, y);
3236 }
3237 #endif
3238
3239 /* Note that the real part always precedes the imag part in memory
3240 regardless of the machine's endianness. */
3241 switch (GET_CODE (XEXP (x, 0)))
3242 {
3243 case PRE_DEC:
3244 case POST_DEC:
3245 imag_first = true;
3246 break;
3247 case PRE_INC:
3248 case POST_INC:
3249 imag_first = false;
3250 break;
3251 default:
3252 gcc_unreachable ();
3253 }
3254
3255 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3256 read_complex_part (y, imag_first));
3257 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3258 read_complex_part (y, !imag_first));
3259 }
3260
3261 /* A subroutine of emit_move_complex. Perform the move from Y to X
3262 via two moves of the parts. Returns the last instruction emitted. */
3263
3264 rtx_insn *
3265 emit_move_complex_parts (rtx x, rtx y)
3266 {
3267 /* Show the output dies here. This is necessary for SUBREGs
3268 of pseudos since we cannot track their lifetimes correctly;
3269 hard regs shouldn't appear here except as return values. */
3270 if (!reload_completed && !reload_in_progress
3271 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3272 emit_clobber (x);
3273
3274 write_complex_part (x, read_complex_part (y, false), false);
3275 write_complex_part (x, read_complex_part (y, true), true);
3276
3277 return get_last_insn ();
3278 }
3279
3280 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3281 MODE is known to be complex. Returns the last instruction emitted. */
3282
3283 static rtx_insn *
3284 emit_move_complex (machine_mode mode, rtx x, rtx y)
3285 {
3286 bool try_int;
3287
3288 /* Need to take special care for pushes, to maintain proper ordering
3289 of the data, and possibly extra padding. */
3290 if (push_operand (x, mode))
3291 return emit_move_complex_push (mode, x, y);
3292
3293 /* See if we can coerce the target into moving both values at once, except
3294 for floating point where we favor moving as parts if this is easy. */
3295 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3296 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3297 && !(REG_P (x)
3298 && HARD_REGISTER_P (x)
3299 && REG_NREGS (x) == 1)
3300 && !(REG_P (y)
3301 && HARD_REGISTER_P (y)
3302 && REG_NREGS (y) == 1))
3303 try_int = false;
3304 /* Not possible if the values are inherently not adjacent. */
3305 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3306 try_int = false;
3307 /* Is possible if both are registers (or subregs of registers). */
3308 else if (register_operand (x, mode) && register_operand (y, mode))
3309 try_int = true;
3310 /* If one of the operands is a memory, and alignment constraints
3311 are friendly enough, we may be able to do combined memory operations.
3312 We do not attempt this if Y is a constant because that combination is
3313 usually better with the by-parts thing below. */
3314 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3315 && (!STRICT_ALIGNMENT
3316 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3317 try_int = true;
3318 else
3319 try_int = false;
3320
3321 if (try_int)
3322 {
3323 rtx_insn *ret;
3324
3325 /* For memory to memory moves, optimal behavior can be had with the
3326 existing block move logic. */
3327 if (MEM_P (x) && MEM_P (y))
3328 {
3329 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3330 BLOCK_OP_NO_LIBCALL);
3331 return get_last_insn ();
3332 }
3333
3334 ret = emit_move_via_integer (mode, x, y, true);
3335 if (ret)
3336 return ret;
3337 }
3338
3339 return emit_move_complex_parts (x, y);
3340 }
3341
3342 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3343 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3344
3345 static rtx_insn *
3346 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3347 {
3348 rtx_insn *ret;
3349
3350 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3351 if (mode != CCmode)
3352 {
3353 enum insn_code code = optab_handler (mov_optab, CCmode);
3354 if (code != CODE_FOR_nothing)
3355 {
3356 x = emit_move_change_mode (CCmode, mode, x, true);
3357 y = emit_move_change_mode (CCmode, mode, y, true);
3358 return emit_insn (GEN_FCN (code) (x, y));
3359 }
3360 }
3361
3362 /* Otherwise, find the MODE_INT mode of the same width. */
3363 ret = emit_move_via_integer (mode, x, y, false);
3364 gcc_assert (ret != NULL);
3365 return ret;
3366 }
3367
3368 /* Return true if word I of OP lies entirely in the
3369 undefined bits of a paradoxical subreg. */
3370
3371 static bool
3372 undefined_operand_subword_p (const_rtx op, int i)
3373 {
3374 machine_mode innermode, innermostmode;
3375 int offset;
3376 if (GET_CODE (op) != SUBREG)
3377 return false;
3378 innermode = GET_MODE (op);
3379 innermostmode = GET_MODE (SUBREG_REG (op));
3380 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3381 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3382 memory, except for a paradoxical subreg where we define
3383 SUBREG_BYTE to be 0; undo this exception as in
3384 simplify_subreg. */
3385 if (SUBREG_BYTE (op) == 0
3386 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3387 {
3388 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3389 if (WORDS_BIG_ENDIAN)
3390 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3391 if (BYTES_BIG_ENDIAN)
3392 offset += difference % UNITS_PER_WORD;
3393 }
3394 if (offset >= GET_MODE_SIZE (innermostmode)
3395 || offset <= -GET_MODE_SIZE (word_mode))
3396 return true;
3397 return false;
3398 }
3399
3400 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3401 MODE is any multi-word or full-word mode that lacks a move_insn
3402 pattern. Note that you will get better code if you define such
3403 patterns, even if they must turn into multiple assembler instructions. */
3404
3405 static rtx_insn *
3406 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3407 {
3408 rtx_insn *last_insn = 0;
3409 rtx_insn *seq;
3410 rtx inner;
3411 bool need_clobber;
3412 int i;
3413
3414 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3415
3416 /* If X is a push on the stack, do the push now and replace
3417 X with a reference to the stack pointer. */
3418 if (push_operand (x, mode))
3419 x = emit_move_resolve_push (mode, x);
3420
3421 /* If we are in reload, see if either operand is a MEM whose address
3422 is scheduled for replacement. */
3423 if (reload_in_progress && MEM_P (x)
3424 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3425 x = replace_equiv_address_nv (x, inner);
3426 if (reload_in_progress && MEM_P (y)
3427 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3428 y = replace_equiv_address_nv (y, inner);
3429
3430 start_sequence ();
3431
3432 need_clobber = false;
3433 for (i = 0;
3434 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3435 i++)
3436 {
3437 rtx xpart = operand_subword (x, i, 1, mode);
3438 rtx ypart;
3439
3440 /* Do not generate code for a move if it would come entirely
3441 from the undefined bits of a paradoxical subreg. */
3442 if (undefined_operand_subword_p (y, i))
3443 continue;
3444
3445 ypart = operand_subword (y, i, 1, mode);
3446
3447 /* If we can't get a part of Y, put Y into memory if it is a
3448 constant. Otherwise, force it into a register. Then we must
3449 be able to get a part of Y. */
3450 if (ypart == 0 && CONSTANT_P (y))
3451 {
3452 y = use_anchored_address (force_const_mem (mode, y));
3453 ypart = operand_subword (y, i, 1, mode);
3454 }
3455 else if (ypart == 0)
3456 ypart = operand_subword_force (y, i, mode);
3457
3458 gcc_assert (xpart && ypart);
3459
3460 need_clobber |= (GET_CODE (xpart) == SUBREG);
3461
3462 last_insn = emit_move_insn (xpart, ypart);
3463 }
3464
3465 seq = get_insns ();
3466 end_sequence ();
3467
3468 /* Show the output dies here. This is necessary for SUBREGs
3469 of pseudos since we cannot track their lifetimes correctly;
3470 hard regs shouldn't appear here except as return values.
3471 We never want to emit such a clobber after reload. */
3472 if (x != y
3473 && ! (reload_in_progress || reload_completed)
3474 && need_clobber != 0)
3475 emit_clobber (x);
3476
3477 emit_insn (seq);
3478
3479 return last_insn;
3480 }
3481
3482 /* Low level part of emit_move_insn.
3483 Called just like emit_move_insn, but assumes X and Y
3484 are basically valid. */
3485
3486 rtx_insn *
3487 emit_move_insn_1 (rtx x, rtx y)
3488 {
3489 machine_mode mode = GET_MODE (x);
3490 enum insn_code code;
3491
3492 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3493
3494 code = optab_handler (mov_optab, mode);
3495 if (code != CODE_FOR_nothing)
3496 return emit_insn (GEN_FCN (code) (x, y));
3497
3498 /* Expand complex moves by moving real part and imag part. */
3499 if (COMPLEX_MODE_P (mode))
3500 return emit_move_complex (mode, x, y);
3501
3502 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3503 || ALL_FIXED_POINT_MODE_P (mode))
3504 {
3505 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3506
3507 /* If we can't find an integer mode, use multi words. */
3508 if (result)
3509 return result;
3510 else
3511 return emit_move_multi_word (mode, x, y);
3512 }
3513
3514 if (GET_MODE_CLASS (mode) == MODE_CC)
3515 return emit_move_ccmode (mode, x, y);
3516
3517 /* Try using a move pattern for the corresponding integer mode. This is
3518 only safe when simplify_subreg can convert MODE constants into integer
3519 constants. At present, it can only do this reliably if the value
3520 fits within a HOST_WIDE_INT. */
3521 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3522 {
3523 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3524
3525 if (ret)
3526 {
3527 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3528 return ret;
3529 }
3530 }
3531
3532 return emit_move_multi_word (mode, x, y);
3533 }
3534
3535 /* Generate code to copy Y into X.
3536 Both Y and X must have the same mode, except that
3537 Y can be a constant with VOIDmode.
3538 This mode cannot be BLKmode; use emit_block_move for that.
3539
3540 Return the last instruction emitted. */
3541
3542 rtx_insn *
3543 emit_move_insn (rtx x, rtx y)
3544 {
3545 machine_mode mode = GET_MODE (x);
3546 rtx y_cst = NULL_RTX;
3547 rtx_insn *last_insn;
3548 rtx set;
3549
3550 gcc_assert (mode != BLKmode
3551 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3552
3553 if (CONSTANT_P (y))
3554 {
3555 if (optimize
3556 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3557 && (last_insn = compress_float_constant (x, y)))
3558 return last_insn;
3559
3560 y_cst = y;
3561
3562 if (!targetm.legitimate_constant_p (mode, y))
3563 {
3564 y = force_const_mem (mode, y);
3565
3566 /* If the target's cannot_force_const_mem prevented the spill,
3567 assume that the target's move expanders will also take care
3568 of the non-legitimate constant. */
3569 if (!y)
3570 y = y_cst;
3571 else
3572 y = use_anchored_address (y);
3573 }
3574 }
3575
3576 /* If X or Y are memory references, verify that their addresses are valid
3577 for the machine. */
3578 if (MEM_P (x)
3579 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3580 MEM_ADDR_SPACE (x))
3581 && ! push_operand (x, GET_MODE (x))))
3582 x = validize_mem (x);
3583
3584 if (MEM_P (y)
3585 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3586 MEM_ADDR_SPACE (y)))
3587 y = validize_mem (y);
3588
3589 gcc_assert (mode != BLKmode);
3590
3591 last_insn = emit_move_insn_1 (x, y);
3592
3593 if (y_cst && REG_P (x)
3594 && (set = single_set (last_insn)) != NULL_RTX
3595 && SET_DEST (set) == x
3596 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3597 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3598
3599 return last_insn;
3600 }
3601
3602 /* Generate the body of an instruction to copy Y into X.
3603 It may be a list of insns, if one insn isn't enough. */
3604
3605 rtx_insn *
3606 gen_move_insn (rtx x, rtx y)
3607 {
3608 rtx_insn *seq;
3609
3610 start_sequence ();
3611 emit_move_insn_1 (x, y);
3612 seq = get_insns ();
3613 end_sequence ();
3614 return seq;
3615 }
3616
3617 /* If Y is representable exactly in a narrower mode, and the target can
3618 perform the extension directly from constant or memory, then emit the
3619 move as an extension. */
3620
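/* Illustrative example: for a DFmode store of 1.5, the value is exactly
   representable in SFmode, so if the target has an extendsfdf2 pattern and
   the narrow form is no more expensive, the move can be emitted as a
   float_extend of the SFmode constant (or of its constant-pool slot)
   instead of a full DFmode constant load.  */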
3621 static rtx_insn *
3622 compress_float_constant (rtx x, rtx y)
3623 {
3624 machine_mode dstmode = GET_MODE (x);
3625 machine_mode orig_srcmode = GET_MODE (y);
3626 machine_mode srcmode;
3627 REAL_VALUE_TYPE r;
3628 int oldcost, newcost;
3629 bool speed = optimize_insn_for_speed_p ();
3630
3631 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3632
3633 if (targetm.legitimate_constant_p (dstmode, y))
3634 oldcost = set_src_cost (y, speed);
3635 else
3636 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3637
3638 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3639 srcmode != orig_srcmode;
3640 srcmode = GET_MODE_WIDER_MODE (srcmode))
3641 {
3642 enum insn_code ic;
3643 rtx trunc_y;
3644 rtx_insn *last_insn;
3645
3646 /* Skip if the target can't extend this way. */
3647 ic = can_extend_p (dstmode, srcmode, 0);
3648 if (ic == CODE_FOR_nothing)
3649 continue;
3650
3651 /* Skip if the narrowed value isn't exact. */
3652 if (! exact_real_truncate (srcmode, &r))
3653 continue;
3654
3655 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3656
3657 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3658 {
3659 /* Skip if the target needs extra instructions to perform
3660 the extension. */
3661 if (!insn_operand_matches (ic, 1, trunc_y))
3662 continue;
3663 /* This is valid, but may not be cheaper than the original. */
3664 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3665 speed);
3666 if (oldcost < newcost)
3667 continue;
3668 }
3669 else if (float_extend_from_mem[dstmode][srcmode])
3670 {
3671 trunc_y = force_const_mem (srcmode, trunc_y);
3672 /* This is valid, but may not be cheaper than the original. */
3673 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3674 speed);
3675 if (oldcost < newcost)
3676 continue;
3677 trunc_y = validize_mem (trunc_y);
3678 }
3679 else
3680 continue;
3681
3682 /* For CSE's benefit, force the compressed constant pool entry
3683 into a new pseudo. This constant may be used in different modes,
3684 and if not, combine will put things back together for us. */
3685 trunc_y = force_reg (srcmode, trunc_y);
3686
3687 /* If x is a hard register, perform the extension into a pseudo,
3688 so that e.g. stack realignment code is aware of it. */
3689 rtx target = x;
3690 if (REG_P (x) && HARD_REGISTER_P (x))
3691 target = gen_reg_rtx (dstmode);
3692
3693 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3694 last_insn = get_last_insn ();
3695
3696 if (REG_P (target))
3697 set_unique_reg_note (last_insn, REG_EQUAL, y);
3698
3699 if (target != x)
3700 return emit_move_insn (x, target);
3701 return last_insn;
3702 }
3703
3704 return NULL;
3705 }
3706 \f
3707 /* Pushing data onto the stack. */
3708
3709 /* Push a block of length SIZE (perhaps variable)
3710 and return an rtx to address the beginning of the block.
3711 The value may be virtual_outgoing_args_rtx.
3712
3713 EXTRA is the number of bytes of padding to push in addition to SIZE.
3714 BELOW nonzero means this padding comes at low addresses;
3715 otherwise, the padding comes at high addresses. */
3716
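/* Illustrative example: on a downward-growing stack,
   push_block (GEN_INT (32), 0, 0) lowers the stack pointer by 32 bytes and
   returns an address based on virtual_outgoing_args_rtx at which those
   32 bytes can then be stored.  */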
3717 rtx
3718 push_block (rtx size, int extra, int below)
3719 {
3720 rtx temp;
3721
3722 size = convert_modes (Pmode, ptr_mode, size, 1);
3723 if (CONSTANT_P (size))
3724 anti_adjust_stack (plus_constant (Pmode, size, extra));
3725 else if (REG_P (size) && extra == 0)
3726 anti_adjust_stack (size);
3727 else
3728 {
3729 temp = copy_to_mode_reg (Pmode, size);
3730 if (extra != 0)
3731 temp = expand_binop (Pmode, add_optab, temp,
3732 gen_int_mode (extra, Pmode),
3733 temp, 0, OPTAB_LIB_WIDEN);
3734 anti_adjust_stack (temp);
3735 }
3736
3737 if (STACK_GROWS_DOWNWARD)
3738 {
3739 temp = virtual_outgoing_args_rtx;
3740 if (extra != 0 && below)
3741 temp = plus_constant (Pmode, temp, extra);
3742 }
3743 else
3744 {
3745 if (CONST_INT_P (size))
3746 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3747 -INTVAL (size) - (below ? 0 : extra));
3748 else if (extra != 0 && !below)
3749 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3750 negate_rtx (Pmode, plus_constant (Pmode, size,
3751 extra)));
3752 else
3753 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3754 negate_rtx (Pmode, size));
3755 }
3756
3757 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3758 }
3759
3760 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3761
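/* Illustrative example: for a push such as
   (set (mem:SI (pre_dec:SI (reg sp))) (reg r0)) the MEM's address is an
   auto-inc, so this returns (reg sp); for a plain (mem:SI (reg r1)) it
   returns NULL.  */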
3762 static rtx
3763 mem_autoinc_base (rtx mem)
3764 {
3765 if (MEM_P (mem))
3766 {
3767 rtx addr = XEXP (mem, 0);
3768 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3769 return XEXP (addr, 0);
3770 }
3771 return NULL;
3772 }
3773
3774 /* A utility routine used here, in reload, and in try_split. The insns
3775 after PREV up to and including LAST are known to adjust the stack,
3776 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3777 placing notes as appropriate. PREV may be NULL, indicating the
3778 entire insn sequence prior to LAST should be scanned.
3779
3780 The set of allowed stack pointer modifications is small:
3781 (1) One or more auto-inc style memory references (aka pushes),
3782 (2) One or more addition/subtraction with the SP as destination,
3783 (3) A single move insn with the SP as destination,
3784 (4) A call_pop insn,
3785 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3786
3787 Insns in the sequence that do not modify the SP are ignored,
3788 except for noreturn calls.
3789
3790 The return value is the amount of adjustment that can be trivially
3791 verified, via immediate operand or auto-inc. If the adjustment
3792 cannot be trivially extracted, the return value is INT_MIN. */
3793
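/* Illustrative examples:
   (set (reg sp) (plus (reg sp) (const_int -16))) yields -16, a push of an
   SImode value through (mem:SI (pre_dec (reg sp))) yields -4, while a
   modification of the stack pointer whose amount cannot be read off the
   insn yields HOST_WIDE_INT_MIN.  */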
3794 HOST_WIDE_INT
3795 find_args_size_adjust (rtx_insn *insn)
3796 {
3797 rtx dest, set, pat;
3798 int i;
3799
3800 pat = PATTERN (insn);
3801 set = NULL;
3802
3803 /* Look for a call_pop pattern. */
3804 if (CALL_P (insn))
3805 {
3806 /* We have to allow non-call_pop patterns for the case
3807 of emit_single_push_insn of a TLS address. */
3808 if (GET_CODE (pat) != PARALLEL)
3809 return 0;
3810
3811 /* All call_pop patterns have a stack pointer adjust in the parallel.
3812 The call itself is always first, and the stack adjust is
3813 usually last, so search from the end. */
3814 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3815 {
3816 set = XVECEXP (pat, 0, i);
3817 if (GET_CODE (set) != SET)
3818 continue;
3819 dest = SET_DEST (set);
3820 if (dest == stack_pointer_rtx)
3821 break;
3822 }
3823 /* We'd better have found the stack pointer adjust. */
3824 if (i == 0)
3825 return 0;
3826 /* Fall through to process the extracted SET and DEST
3827 as if it were a standalone insn. */
3828 }
3829 else if (GET_CODE (pat) == SET)
3830 set = pat;
3831 else if ((set = single_set (insn)) != NULL)
3832 ;
3833 else if (GET_CODE (pat) == PARALLEL)
3834 {
3835 /* ??? Some older ports use a parallel with a stack adjust
3836 and a store for a PUSH_ROUNDING pattern, rather than a
3837 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3838 /* ??? See h8300 and m68k, pushqi1. */
3839 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3840 {
3841 set = XVECEXP (pat, 0, i);
3842 if (GET_CODE (set) != SET)
3843 continue;
3844 dest = SET_DEST (set);
3845 if (dest == stack_pointer_rtx)
3846 break;
3847
3848 /* We do not expect an auto-inc of the sp in the parallel. */
3849 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3850 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3851 != stack_pointer_rtx);
3852 }
3853 if (i < 0)
3854 return 0;
3855 }
3856 else
3857 return 0;
3858
3859 dest = SET_DEST (set);
3860
3861 /* Look for direct modifications of the stack pointer. */
3862 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3863 {
3864 /* Look for a trivial adjustment, otherwise assume nothing. */
3865 /* Note that the SPU restore_stack_block pattern refers to
3866 the stack pointer in V4SImode. Consider that non-trivial. */
3867 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3868 && GET_CODE (SET_SRC (set)) == PLUS
3869 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3870 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3871 return INTVAL (XEXP (SET_SRC (set), 1));
3872 /* ??? Reload can generate no-op moves, which will be cleaned
3873 up later. Recognize it and continue searching. */
3874 else if (rtx_equal_p (dest, SET_SRC (set)))
3875 return 0;
3876 else
3877 return HOST_WIDE_INT_MIN;
3878 }
3879 else
3880 {
3881 rtx mem, addr;
3882
3883 /* Otherwise only think about autoinc patterns. */
3884 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3885 {
3886 mem = dest;
3887 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3888 != stack_pointer_rtx);
3889 }
3890 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3891 mem = SET_SRC (set);
3892 else
3893 return 0;
3894
3895 addr = XEXP (mem, 0);
3896 switch (GET_CODE (addr))
3897 {
3898 case PRE_INC:
3899 case POST_INC:
3900 return GET_MODE_SIZE (GET_MODE (mem));
3901 case PRE_DEC:
3902 case POST_DEC:
3903 return -GET_MODE_SIZE (GET_MODE (mem));
3904 case PRE_MODIFY:
3905 case POST_MODIFY:
3906 addr = XEXP (addr, 1);
3907 gcc_assert (GET_CODE (addr) == PLUS);
3908 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3909 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3910 return INTVAL (XEXP (addr, 1));
3911 default:
3912 gcc_unreachable ();
3913 }
3914 }
3915 }
3916
3917 int
3918 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3919 {
3920 int args_size = end_args_size;
3921 bool saw_unknown = false;
3922 rtx_insn *insn;
3923
3924 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3925 {
3926 HOST_WIDE_INT this_delta;
3927
3928 if (!NONDEBUG_INSN_P (insn))
3929 continue;
3930
3931 this_delta = find_args_size_adjust (insn);
3932 if (this_delta == 0)
3933 {
3934 if (!CALL_P (insn)
3935 || ACCUMULATE_OUTGOING_ARGS
3936 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3937 continue;
3938 }
3939
3940 gcc_assert (!saw_unknown);
3941 if (this_delta == HOST_WIDE_INT_MIN)
3942 saw_unknown = true;
3943
3944 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3945 if (STACK_GROWS_DOWNWARD)
3946 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3947
3948 args_size -= this_delta;
3949 }
3950
3951 return saw_unknown ? INT_MIN : args_size;
3952 }
3953
3954 #ifdef PUSH_ROUNDING
3955 /* Emit a single push insn. */
3956
3957 static void
3958 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3959 {
3960 rtx dest_addr;
3961 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3962 rtx dest;
3963 enum insn_code icode;
3964
3965 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3966 /* If there is a push pattern, use it. Otherwise fall back to the old way
3967 of handing a MEM representing the push operation to the move expander. */
3968 icode = optab_handler (push_optab, mode);
3969 if (icode != CODE_FOR_nothing)
3970 {
3971 struct expand_operand ops[1];
3972
3973 create_input_operand (&ops[0], x, mode);
3974 if (maybe_expand_insn (icode, 1, ops))
3975 return;
3976 }
3977 if (GET_MODE_SIZE (mode) == rounded_size)
3978 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3979 /* If we are to pad downward, adjust the stack pointer first and
3980 then store X into the stack location using an offset. This is
3981 because emit_move_insn does not know how to pad; it does not have
3982 access to type. */
3983 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3984 {
3985 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3986 HOST_WIDE_INT offset;
3987
3988 emit_move_insn (stack_pointer_rtx,
3989 expand_binop (Pmode,
3990 STACK_GROWS_DOWNWARD ? sub_optab
3991 : add_optab,
3992 stack_pointer_rtx,
3993 gen_int_mode (rounded_size, Pmode),
3994 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3995
3996 offset = (HOST_WIDE_INT) padding_size;
3997 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3998 /* We have already decremented the stack pointer, so get the
3999 previous value. */
4000 offset += (HOST_WIDE_INT) rounded_size;
4001
4002 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4003 /* We have already incremented the stack pointer, so get the
4004 previous value. */
4005 offset -= (HOST_WIDE_INT) rounded_size;
4006
4007 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4008 gen_int_mode (offset, Pmode));
4009 }
4010 else
4011 {
4012 if (STACK_GROWS_DOWNWARD)
4013 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4014 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4015 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4016 Pmode));
4017 else
4018 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4019 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4020 gen_int_mode (rounded_size, Pmode));
4021
4022 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4023 }
4024
4025 dest = gen_rtx_MEM (mode, dest_addr);
4026
4027 if (type != 0)
4028 {
4029 set_mem_attributes (dest, type, 1);
4030
4031 if (cfun->tail_call_marked)
4032 /* Function incoming arguments may overlap with sibling call
4033 outgoing arguments and we cannot allow reordering of reads
4034 from function arguments with stores to outgoing arguments
4035 of sibling calls. */
4036 set_mem_alias_set (dest, 0);
4037 }
4038 emit_move_insn (dest, x);
4039 }
4040
4041 /* Emit and annotate a single push insn. */
4042
4043 static void
4044 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4045 {
4046 int delta, old_delta = stack_pointer_delta;
4047 rtx_insn *prev = get_last_insn ();
4048 rtx_insn *last;
4049
4050 emit_single_push_insn_1 (mode, x, type);
4051
4052 last = get_last_insn ();
4053
4054 /* Notice the common case where we emitted exactly one insn. */
4055 if (PREV_INSN (last) == prev)
4056 {
4057 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4058 return;
4059 }
4060
4061 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4062 gcc_assert (delta == INT_MIN || delta == old_delta);
4063 }
4064 #endif
4065
4066 /* If reading SIZE bytes from X will end up reading from
4067 Y, return the number of bytes that overlap. Return -1
4068 if there is no overlap, or -2 if we can't determine this
4069 (for example when X and Y have different base registers). */
4070
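/* Worked example: with X = sp, Y = sp + 12 and SIZE = 16, the difference
   (X + SIZE) - Y is 4, which lies in [1, 16], so 4 bytes of the read
   overlap Y. With Y = sp + 32 the difference is -16, so there is no
   overlap and -1 is returned; if X and Y use different base registers the
   difference does not simplify to a constant and -2 is returned.  */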
4071 static int
4072 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4073 {
4074 rtx tmp = plus_constant (Pmode, x, size);
4075 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4076
4077 if (!CONST_INT_P (sub))
4078 return -2;
4079
4080 HOST_WIDE_INT val = INTVAL (sub);
4081
4082 return IN_RANGE (val, 1, size) ? val : -1;
4083 }
4084
4085 /* Generate code to push X onto the stack, assuming it has mode MODE and
4086 type TYPE.
4087 MODE is redundant except when X is a CONST_INT (since they don't
4088 carry mode info).
4089 SIZE is an rtx for the size of data to be copied (in bytes),
4090 needed only if X is BLKmode.
4091 Return true if successful. May return false if asked to push a
4092 partial argument during a sibcall optimization (as specified by
4093 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4094 to not overlap.
4095
4096 ALIGN (in bits) is maximum alignment we can assume.
4097
4098 If PARTIAL and REG are both nonzero, then copy that many of the first
4099 bytes of X into registers starting with REG, and push the rest of X.
4100 The amount of space pushed is decreased by PARTIAL bytes.
4101 REG must be a hard register in this case.
4102 If REG is zero but PARTIAL is not, take all other actions for an
4103 argument partially in registers, but do not actually load any
4104 registers.
4105
4106 EXTRA is the amount in bytes of extra space to leave next to this arg.
4107 This is ignored if an argument block has already been allocated.
4108
4109 On a machine that lacks real push insns, ARGS_ADDR is the address of
4110 the bottom of the argument block for this call. We use indexing off there
4111 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4112 argument block has not been preallocated.
4113
4114 ARGS_SO_FAR is the size of args previously pushed for this call.
4115
4116 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4117 for arguments passed in registers. If nonzero, it will be the number
4118 of bytes required. */
4119
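/* Illustrative example: on a hypothetical 32-bit target, pushing a 12-byte
   BLKmode argument with PARTIAL == 8 and REG == r4 loads the first two
   words into r4 and r5 and pushes only the remaining 4 bytes; the overlap
   handling below covers the sibcall case where the block move into the
   outgoing-argument area could overwrite the very bytes of X that still
   have to be loaded into those registers.  */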
4120 bool
4121 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4122 unsigned int align, int partial, rtx reg, int extra,
4123 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4124 rtx alignment_pad, bool sibcall_p)
4125 {
4126 rtx xinner;
4127 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4128
4129 /* Decide where to pad the argument: `downward' for below,
4130 `upward' for above, or `none' for don't pad it.
4131 Default is below for small data on big-endian machines; else above. */
4132 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4133
4134 /* Invert direction if stack is post-decrement.
4135 FIXME: why? */
4136 if (STACK_PUSH_CODE == POST_DEC)
4137 if (where_pad != none)
4138 where_pad = (where_pad == downward ? upward : downward);
4139
4140 xinner = x;
4141
4142 int nregs = partial / UNITS_PER_WORD;
4143 rtx *tmp_regs = NULL;
4144 int overlapping = 0;
4145
4146 if (mode == BLKmode
4147 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4148 {
4149 /* Copy a block into the stack, entirely or partially. */
4150
4151 rtx temp;
4152 int used;
4153 int offset;
4154 int skip;
4155
4156 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4157 used = partial - offset;
4158
4159 if (mode != BLKmode)
4160 {
4161 /* A value is to be stored in an insufficiently aligned
4162 stack slot; copy via a suitably aligned slot if
4163 necessary. */
4164 size = GEN_INT (GET_MODE_SIZE (mode));
4165 if (!MEM_P (xinner))
4166 {
4167 temp = assign_temp (type, 1, 1);
4168 emit_move_insn (temp, xinner);
4169 xinner = temp;
4170 }
4171 }
4172
4173 gcc_assert (size);
4174
4175 /* USED is now the # of bytes we need not copy to the stack
4176 because registers will take care of them. */
4177
4178 if (partial != 0)
4179 xinner = adjust_address (xinner, BLKmode, used);
4180
4181 /* If the partial register-part of the arg counts in its stack size,
4182 skip the part of stack space corresponding to the registers.
4183 Otherwise, start copying to the beginning of the stack space,
4184 by setting SKIP to 0. */
4185 skip = (reg_parm_stack_space == 0) ? 0 : used;
4186
4187 #ifdef PUSH_ROUNDING
4188 /* Do it with several push insns if that doesn't take lots of insns
4189 and if there is no difficulty with push insns that skip bytes
4190 on the stack for alignment purposes. */
4191 if (args_addr == 0
4192 && PUSH_ARGS
4193 && CONST_INT_P (size)
4194 && skip == 0
4195 && MEM_ALIGN (xinner) >= align
4196 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4197 /* Here we avoid the case of a structure whose weak alignment
4198 forces many pushes of a small amount of data,
4199 and such small pushes do rounding that causes trouble. */
4200 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4201 || align >= BIGGEST_ALIGNMENT
4202 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4203 == (align / BITS_PER_UNIT)))
4204 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4205 {
4206 /* Push padding now if padding above and stack grows down,
4207 or if padding below and stack grows up.
4208 But if space already allocated, this has already been done. */
4209 if (extra && args_addr == 0
4210 && where_pad != none && where_pad != stack_direction)
4211 anti_adjust_stack (GEN_INT (extra));
4212
4213 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4214 }
4215 else
4216 #endif /* PUSH_ROUNDING */
4217 {
4218 rtx target;
4219
4220 /* Otherwise make space on the stack and copy the data
4221 to the address of that space. */
4222
4223 /* Deduct words put into registers from the size we must copy. */
4224 if (partial != 0)
4225 {
4226 if (CONST_INT_P (size))
4227 size = GEN_INT (INTVAL (size) - used);
4228 else
4229 size = expand_binop (GET_MODE (size), sub_optab, size,
4230 gen_int_mode (used, GET_MODE (size)),
4231 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4232 }
4233
4234 /* Get the address of the stack space.
4235 In this case, we do not deal with EXTRA separately.
4236 A single stack adjust will do. */
4237 if (! args_addr)
4238 {
4239 temp = push_block (size, extra, where_pad == downward);
4240 extra = 0;
4241 }
4242 else if (CONST_INT_P (args_so_far))
4243 temp = memory_address (BLKmode,
4244 plus_constant (Pmode, args_addr,
4245 skip + INTVAL (args_so_far)));
4246 else
4247 temp = memory_address (BLKmode,
4248 plus_constant (Pmode,
4249 gen_rtx_PLUS (Pmode,
4250 args_addr,
4251 args_so_far),
4252 skip));
4253
4254 if (!ACCUMULATE_OUTGOING_ARGS)
4255 {
4256 /* If the source is referenced relative to the stack pointer,
4257 copy it to another register to stabilize it. We do not need
4258 to do this if we know that we won't be changing sp. */
4259
4260 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4261 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4262 temp = copy_to_reg (temp);
4263 }
4264
4265 target = gen_rtx_MEM (BLKmode, temp);
4266
4267 /* We do *not* set_mem_attributes here, because incoming arguments
4268 may overlap with sibling call outgoing arguments and we cannot
4269 allow reordering of reads from function arguments with stores
4270 to outgoing arguments of sibling calls. We do, however, want
4271 to record the alignment of the stack slot. */
4272 /* ALIGN may well be better aligned than TYPE, e.g. due to
4273 PARM_BOUNDARY. Assume the caller isn't lying. */
4274 set_mem_align (target, align);
4275
4276 /* If part should go in registers and pushing to that part would
4277 overwrite some of the values that need to go into regs, load the
4278 overlapping values into temporary pseudos to be moved into the hard
4279 regs at the end after the stack pushing has completed.
4280 We cannot load them directly into the hard regs here because
4281 they can be clobbered by the block move expansions.
4282 See PR 65358. */
4283
4284 if (partial > 0 && reg != 0 && mode == BLKmode
4285 && GET_CODE (reg) != PARALLEL)
4286 {
4287 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4288 if (overlapping > 0)
4289 {
4290 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4291 overlapping /= UNITS_PER_WORD;
4292
4293 tmp_regs = XALLOCAVEC (rtx, overlapping);
4294
4295 for (int i = 0; i < overlapping; i++)
4296 tmp_regs[i] = gen_reg_rtx (word_mode);
4297
4298 for (int i = 0; i < overlapping; i++)
4299 emit_move_insn (tmp_regs[i],
4300 operand_subword_force (target, i, mode));
4301 }
4302 else if (overlapping == -1)
4303 overlapping = 0;
4304 /* Could not determine whether there is overlap.
4305 Fail the sibcall. */
4306 else
4307 {
4308 overlapping = 0;
4309 if (sibcall_p)
4310 return false;
4311 }
4312 }
4313 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4314 }
4315 }
4316 else if (partial > 0)
4317 {
4318 /* Scalar partly in registers. */
4319
4320 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4321 int i;
4322 int not_stack;
4323 /* # bytes of start of argument
4324 that we must make space for but need not store. */
4325 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4326 int args_offset = INTVAL (args_so_far);
4327 int skip;
4328
4329 /* Push padding now if padding above and stack grows down,
4330 or if padding below and stack grows up.
4331 But if space already allocated, this has already been done. */
4332 if (extra && args_addr == 0
4333 && where_pad != none && where_pad != stack_direction)
4334 anti_adjust_stack (GEN_INT (extra));
4335
4336 /* If we make space by pushing it, we might as well push
4337 the real data. Otherwise, we can leave OFFSET nonzero
4338 and leave the space uninitialized. */
4339 if (args_addr == 0)
4340 offset = 0;
4341
4342 /* Now NOT_STACK gets the number of words that we don't need to
4343 allocate on the stack. Convert OFFSET to words too. */
4344 not_stack = (partial - offset) / UNITS_PER_WORD;
4345 offset /= UNITS_PER_WORD;
4346
4347 /* If the partial register-part of the arg counts in its stack size,
4348 skip the part of stack space corresponding to the registers.
4349 Otherwise, start copying to the beginning of the stack space,
4350 by setting SKIP to 0. */
4351 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4352
4353 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4354 x = validize_mem (force_const_mem (mode, x));
4355
4356 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4357 SUBREGs of such registers are not allowed. */
4358 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4359 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4360 x = copy_to_reg (x);
4361
4362 /* Loop over all the words allocated on the stack for this arg. */
4363 /* We can do it by words, because any scalar bigger than a word
4364 has a size a multiple of a word. */
4365 for (i = size - 1; i >= not_stack; i--)
4366 if (i >= not_stack + offset)
4367 if (!emit_push_insn (operand_subword_force (x, i, mode),
4368 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4369 0, args_addr,
4370 GEN_INT (args_offset + ((i - not_stack + skip)
4371 * UNITS_PER_WORD)),
4372 reg_parm_stack_space, alignment_pad, sibcall_p))
4373 return false;
4374 }
4375 else
4376 {
4377 rtx addr;
4378 rtx dest;
4379
4380 /* Push padding now if padding above and stack grows down,
4381 or if padding below and stack grows up.
4382 But if space already allocated, this has already been done. */
4383 if (extra && args_addr == 0
4384 && where_pad != none && where_pad != stack_direction)
4385 anti_adjust_stack (GEN_INT (extra));
4386
4387 #ifdef PUSH_ROUNDING
4388 if (args_addr == 0 && PUSH_ARGS)
4389 emit_single_push_insn (mode, x, type);
4390 else
4391 #endif
4392 {
4393 if (CONST_INT_P (args_so_far))
4394 addr
4395 = memory_address (mode,
4396 plus_constant (Pmode, args_addr,
4397 INTVAL (args_so_far)));
4398 else
4399 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4400 args_so_far));
4401 dest = gen_rtx_MEM (mode, addr);
4402
4403 /* We do *not* set_mem_attributes here, because incoming arguments
4404 may overlap with sibling call outgoing arguments and we cannot
4405 allow reordering of reads from function arguments with stores
4406 to outgoing arguments of sibling calls. We do, however, want
4407 to record the alignment of the stack slot. */
4408 /* ALIGN may well be better aligned than TYPE, e.g. due to
4409 PARM_BOUNDARY. Assume the caller isn't lying. */
4410 set_mem_align (dest, align);
4411
4412 emit_move_insn (dest, x);
4413 }
4414 }
4415
4416 /* Move the partial arguments into the registers and any overlapping
4417 values that we moved into the pseudos in tmp_regs. */
4418 if (partial > 0 && reg != 0)
4419 {
4420 /* Handle calls that pass values in multiple non-contiguous locations.
4421 The Irix 6 ABI has examples of this. */
4422 if (GET_CODE (reg) == PARALLEL)
4423 emit_group_load (reg, x, type, -1);
4424 else
4425 {
4426 gcc_assert (partial % UNITS_PER_WORD == 0);
4427 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4428
4429 for (int i = 0; i < overlapping; i++)
4430 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4431 + nregs - overlapping + i),
4432 tmp_regs[i]);
4433
4434 }
4435 }
4436
4437 if (extra && args_addr == 0 && where_pad == stack_direction)
4438 anti_adjust_stack (GEN_INT (extra));
4439
4440 if (alignment_pad && args_addr == 0)
4441 anti_adjust_stack (alignment_pad);
4442
4443 return true;
4444 }
4445 \f
4446 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4447 operations. */
4448
4449 static rtx
4450 get_subtarget (rtx x)
4451 {
4452 return (optimize
4453 || x == 0
4454 /* Only registers can be subtargets. */
4455 || !REG_P (x)
4456 /* Don't use hard regs to avoid extending their life. */
4457 || REGNO (x) < FIRST_PSEUDO_REGISTER
4458 ? 0 : x);
4459 }
4460
4461 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4462 FIELD is a bitfield. Returns true if the optimization was successful,
4463 and there's nothing else to do. */
4464
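/* Illustrative example: given

     struct S { unsigned lo : 12; unsigned hi : 20; } s;
     s.hi += 1;

   HI is the topmost bitfield of its word, so the increment can be done as
   a single add of (1 << 12) on the containing word: carries out of the top
   simply vanish and LO is untouched, with no extract/mask/insert sequence.  */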
4465 static bool
4466 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4467 unsigned HOST_WIDE_INT bitpos,
4468 unsigned HOST_WIDE_INT bitregion_start,
4469 unsigned HOST_WIDE_INT bitregion_end,
4470 machine_mode mode1, rtx str_rtx,
4471 tree to, tree src)
4472 {
4473 machine_mode str_mode = GET_MODE (str_rtx);
4474 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4475 tree op0, op1;
4476 rtx value, result;
4477 optab binop;
4478 gimple srcstmt;
4479 enum tree_code code;
4480
4481 if (mode1 != VOIDmode
4482 || bitsize >= BITS_PER_WORD
4483 || str_bitsize > BITS_PER_WORD
4484 || TREE_SIDE_EFFECTS (to)
4485 || TREE_THIS_VOLATILE (to))
4486 return false;
4487
4488 STRIP_NOPS (src);
4489 if (TREE_CODE (src) != SSA_NAME)
4490 return false;
4491 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4492 return false;
4493
4494 srcstmt = get_gimple_for_ssa_name (src);
4495 if (!srcstmt
4496 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4497 return false;
4498
4499 code = gimple_assign_rhs_code (srcstmt);
4500
4501 op0 = gimple_assign_rhs1 (srcstmt);
4502
4503 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4504 to find its initialization. Hopefully the initialization will
4505 be from a bitfield load. */
4506 if (TREE_CODE (op0) == SSA_NAME)
4507 {
4508 gimple op0stmt = get_gimple_for_ssa_name (op0);
4509
4510 /* We want to eventually have OP0 be the same as TO, which
4511 should be a bitfield. */
4512 if (!op0stmt
4513 || !is_gimple_assign (op0stmt)
4514 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4515 return false;
4516 op0 = gimple_assign_rhs1 (op0stmt);
4517 }
4518
4519 op1 = gimple_assign_rhs2 (srcstmt);
4520
4521 if (!operand_equal_p (to, op0, 0))
4522 return false;
4523
4524 if (MEM_P (str_rtx))
4525 {
4526 unsigned HOST_WIDE_INT offset1;
4527
4528 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4529 str_mode = word_mode;
4530 str_mode = get_best_mode (bitsize, bitpos,
4531 bitregion_start, bitregion_end,
4532 MEM_ALIGN (str_rtx), str_mode, 0);
4533 if (str_mode == VOIDmode)
4534 return false;
4535 str_bitsize = GET_MODE_BITSIZE (str_mode);
4536
4537 offset1 = bitpos;
4538 bitpos %= str_bitsize;
4539 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4540 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4541 }
4542 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4543 return false;
4544
4545 /* If the bit field covers the whole REG/MEM, store_field
4546 will likely generate better code. */
4547 if (bitsize >= str_bitsize)
4548 return false;
4549
4550 /* We can't handle fields split across multiple entities. */
4551 if (bitpos + bitsize > str_bitsize)
4552 return false;
4553
4554 if (BYTES_BIG_ENDIAN)
4555 bitpos = str_bitsize - bitpos - bitsize;
4556
4557 switch (code)
4558 {
4559 case PLUS_EXPR:
4560 case MINUS_EXPR:
4561 /* For now, just optimize the case of the topmost bitfield,
4562 where we don't need to do any masking, and also
4563 1-bit bitfields, where xor can be used.
4564 We might win by one instruction for the other bitfields
4565 too if insv/extv instructions aren't used, so that
4566 can be added later. */
4567 if (bitpos + bitsize != str_bitsize
4568 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4569 break;
4570
4571 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4572 value = convert_modes (str_mode,
4573 TYPE_MODE (TREE_TYPE (op1)), value,
4574 TYPE_UNSIGNED (TREE_TYPE (op1)));
4575
4576 /* We may be accessing data outside the field, which means
4577 we can alias adjacent data. */
4578 if (MEM_P (str_rtx))
4579 {
4580 str_rtx = shallow_copy_rtx (str_rtx);
4581 set_mem_alias_set (str_rtx, 0);
4582 set_mem_expr (str_rtx, 0);
4583 }
4584
4585 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4586 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4587 {
4588 value = expand_and (str_mode, value, const1_rtx, NULL);
4589 binop = xor_optab;
4590 }
4591 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4592 result = expand_binop (str_mode, binop, str_rtx,
4593 value, str_rtx, 1, OPTAB_WIDEN);
4594 if (result != str_rtx)
4595 emit_move_insn (str_rtx, result);
4596 return true;
4597
4598 case BIT_IOR_EXPR:
4599 case BIT_XOR_EXPR:
4600 if (TREE_CODE (op1) != INTEGER_CST)
4601 break;
4602 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4603 value = convert_modes (str_mode,
4604 TYPE_MODE (TREE_TYPE (op1)), value,
4605 TYPE_UNSIGNED (TREE_TYPE (op1)));
4606
4607 /* We may be accessing data outside the field, which means
4608 we can alias adjacent data. */
4609 if (MEM_P (str_rtx))
4610 {
4611 str_rtx = shallow_copy_rtx (str_rtx);
4612 set_mem_alias_set (str_rtx, 0);
4613 set_mem_expr (str_rtx, 0);
4614 }
4615
4616 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4617 if (bitpos + bitsize != str_bitsize)
4618 {
4619 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4620 str_mode);
4621 value = expand_and (str_mode, value, mask, NULL_RTX);
4622 }
4623 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4624 result = expand_binop (str_mode, binop, str_rtx,
4625 value, str_rtx, 1, OPTAB_WIDEN);
4626 if (result != str_rtx)
4627 emit_move_insn (str_rtx, result);
4628 return true;
4629
4630 default:
4631 break;
4632 }
4633
4634 return false;
4635 }
4636
4637 /* In the C++ memory model, consecutive bit fields in a structure are
4638 considered one memory location.
4639
4640 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4641 returns the bit range of consecutive bits in which this COMPONENT_REF
4642 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4643 and *OFFSET may be adjusted in the process.
4644
4645 If the access does not need to be restricted, 0 is returned in both
4646 *BITSTART and *BITEND. */
4647
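/* Illustrative example: for

     struct S { char c; int b1 : 7; int b2 : 9; } s;

   B1 and B2 share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to s.b1
   gets a bit range covering both bitfields but not C; under the C++
   memory model the store may rewrite B2's bits with their old value but
   must not touch C.  */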
4648 static void
4649 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4650 unsigned HOST_WIDE_INT *bitend,
4651 tree exp,
4652 HOST_WIDE_INT *bitpos,
4653 tree *offset)
4654 {
4655 HOST_WIDE_INT bitoffset;
4656 tree field, repr;
4657
4658 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4659
4660 field = TREE_OPERAND (exp, 1);
4661 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4662 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4663 need to limit the range we can access. */
4664 if (!repr)
4665 {
4666 *bitstart = *bitend = 0;
4667 return;
4668 }
4669
4670 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4671 part of a larger bit field, then the representative does not serve any
4672 useful purpose. This can occur in Ada. */
4673 if (handled_component_p (TREE_OPERAND (exp, 0)))
4674 {
4675 machine_mode rmode;
4676 HOST_WIDE_INT rbitsize, rbitpos;
4677 tree roffset;
4678 int unsignedp;
4679 int volatilep = 0;
4680 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4681 &roffset, &rmode, &unsignedp, &volatilep, false);
4682 if ((rbitpos % BITS_PER_UNIT) != 0)
4683 {
4684 *bitstart = *bitend = 0;
4685 return;
4686 }
4687 }
4688
4689 /* Compute the adjustment to bitpos from the offset of the field
4690 relative to the representative. DECL_FIELD_OFFSET of field and
4691 repr are the same by construction if they are not constants,
4692 see finish_bitfield_layout. */
4693 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4694 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4695 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4696 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4697 else
4698 bitoffset = 0;
4699 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4700 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4701
4702 /* If the adjustment is larger than bitpos, we would have a negative bit
4703 position for the lower bound and this may wreak havoc later. Adjust
4704 offset and bitpos to make the lower bound non-negative in that case. */
4705 if (bitoffset > *bitpos)
4706 {
4707 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4708 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4709
4710 *bitpos += adjust;
4711 if (*offset == NULL_TREE)
4712 *offset = size_int (-adjust / BITS_PER_UNIT);
4713 else
4714 *offset
4715 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4716 *bitstart = 0;
4717 }
4718 else
4719 *bitstart = *bitpos - bitoffset;
4720
4721 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4722 }
4723
4724 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4725 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4726 DECL_RTL was not set yet, return NORTL. */
4727
4728 static inline bool
4729 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4730 {
4731 if (TREE_CODE (addr) != ADDR_EXPR)
4732 return false;
4733
4734 tree base = TREE_OPERAND (addr, 0);
4735
4736 if (!DECL_P (base)
4737 || TREE_ADDRESSABLE (base)
4738 || DECL_MODE (base) == BLKmode)
4739 return false;
4740
4741 if (!DECL_RTL_SET_P (base))
4742 return nortl;
4743
4744 return (!MEM_P (DECL_RTL (base)));
4745 }
4746
4747 /* Returns true if the MEM_REF REF refers to an object that does not
4748 reside in memory and has non-BLKmode. */
4749
4750 static inline bool
4751 mem_ref_refers_to_non_mem_p (tree ref)
4752 {
4753 tree base = TREE_OPERAND (ref, 0);
4754 return addr_expr_of_non_mem_decl_p_1 (base, false);
4755 }
4756
4757 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4758 is true, try generating a nontemporal store. */
4759
4760 void
4761 expand_assignment (tree to, tree from, bool nontemporal)
4762 {
4763 rtx to_rtx = 0;
4764 rtx result;
4765 machine_mode mode;
4766 unsigned int align;
4767 enum insn_code icode;
4768
4769 /* Don't crash if the lhs of the assignment was erroneous. */
4770 if (TREE_CODE (to) == ERROR_MARK)
4771 {
4772 expand_normal (from);
4773 return;
4774 }
4775
4776 /* Optimize away no-op moves without side-effects. */
4777 if (operand_equal_p (to, from, 0))
4778 return;
4779
4780 /* Handle misaligned stores. */
4781 mode = TYPE_MODE (TREE_TYPE (to));
4782 if ((TREE_CODE (to) == MEM_REF
4783 || TREE_CODE (to) == TARGET_MEM_REF)
4784 && mode != BLKmode
4785 && !mem_ref_refers_to_non_mem_p (to)
4786 && ((align = get_object_alignment (to))
4787 < GET_MODE_ALIGNMENT (mode))
4788 && (((icode = optab_handler (movmisalign_optab, mode))
4789 != CODE_FOR_nothing)
4790 || SLOW_UNALIGNED_ACCESS (mode, align)))
4791 {
4792 rtx reg, mem;
4793
4794 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4795 reg = force_not_mem (reg);
4796 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4797
4798 if (icode != CODE_FOR_nothing)
4799 {
4800 struct expand_operand ops[2];
4801
4802 create_fixed_operand (&ops[0], mem);
4803 create_input_operand (&ops[1], reg, mode);
4804 /* The movmisalign<mode> pattern cannot fail, else the assignment
4805 would silently be omitted. */
4806 expand_insn (icode, 2, ops);
4807 }
4808 else
4809 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4810 return;
4811 }
4812
4813 /* Assignment of a structure component needs special treatment
4814 if the structure component's rtx is not simply a MEM.
4815 Assignment of an array element at a constant index, and assignment of
4816 an array element in an unaligned packed structure field, has the same
4817 problem. Same for (partially) storing into a non-memory object. */
4818 if (handled_component_p (to)
4819 || (TREE_CODE (to) == MEM_REF
4820 && mem_ref_refers_to_non_mem_p (to))
4821 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4822 {
4823 machine_mode mode1;
4824 HOST_WIDE_INT bitsize, bitpos;
4825 unsigned HOST_WIDE_INT bitregion_start = 0;
4826 unsigned HOST_WIDE_INT bitregion_end = 0;
4827 tree offset;
4828 int unsignedp;
4829 int volatilep = 0;
4830 tree tem;
4831
4832 push_temp_slots ();
4833 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4834 &unsignedp, &volatilep, true);
4835
4836 /* Make sure bitpos is not negative, it can wreak havoc later. */
4837 if (bitpos < 0)
4838 {
4839 gcc_assert (offset == NULL_TREE);
4840 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4841 ? 3 : exact_log2 (BITS_PER_UNIT)));
4842 bitpos &= BITS_PER_UNIT - 1;
4843 }
4844
4845 if (TREE_CODE (to) == COMPONENT_REF
4846 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4847 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4848 /* The C++ memory model naturally applies to byte-aligned fields.
4849 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4850 BITSIZE are not byte-aligned, there is no need to limit the range
4851 we can access. This can occur with packed structures in Ada. */
4852 else if (bitsize > 0
4853 && bitsize % BITS_PER_UNIT == 0
4854 && bitpos % BITS_PER_UNIT == 0)
4855 {
4856 bitregion_start = bitpos;
4857 bitregion_end = bitpos + bitsize - 1;
4858 }
4859
4860 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4861
4862 /* If the field has a mode, we want to access it in the
4863 field's mode, not the computed mode.
4864 If a MEM has VOIDmode (external with incomplete type),
4865 use BLKmode for it instead. */
4866 if (MEM_P (to_rtx))
4867 {
4868 if (mode1 != VOIDmode)
4869 to_rtx = adjust_address (to_rtx, mode1, 0);
4870 else if (GET_MODE (to_rtx) == VOIDmode)
4871 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4872 }
4873
4874 if (offset != 0)
4875 {
4876 machine_mode address_mode;
4877 rtx offset_rtx;
4878
4879 if (!MEM_P (to_rtx))
4880 {
4881 /* We can get constant negative offsets into arrays with broken
4882 user code. Translate this to a trap instead of ICEing. */
4883 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4884 expand_builtin_trap ();
4885 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4886 }
4887
4888 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4889 address_mode = get_address_mode (to_rtx);
4890 if (GET_MODE (offset_rtx) != address_mode)
4891 {
4892 /* We cannot be sure that the RTL in offset_rtx is valid outside
4893 of a memory address context, so force it into a register
4894 before attempting to convert it to the desired mode. */
4895 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4896 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4897 }
4898
4899 /* If we have an expression in OFFSET_RTX and a non-zero
4900 byte offset in BITPOS, adding the byte offset before the
4901 OFFSET_RTX results in better intermediate code, which makes
4902 later rtl optimization passes perform better.
4903
4904 We prefer intermediate code like this:
4905
4906 r124:DI=r123:DI+0x18
4907 [r124:DI]=r121:DI
4908
4909 ... instead of ...
4910
4911 r124:DI=r123:DI+0x10
4912 [r124:DI+0x8]=r121:DI
4913
4914 This is only done for aligned data values, as these can
4915 be expected to result in single move instructions. */
4916 if (mode1 != VOIDmode
4917 && bitpos != 0
4918 && bitsize > 0
4919 && (bitpos % bitsize) == 0
4920 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4921 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4922 {
4923 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4924 bitregion_start = 0;
4925 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4926 bitregion_end -= bitpos;
4927 bitpos = 0;
4928 }
4929
4930 to_rtx = offset_address (to_rtx, offset_rtx,
4931 highest_pow2_factor_for_target (to,
4932 offset));
4933 }
4934
4935 /* No action is needed if the target is not a memory and the field
4936 lies completely outside that target. This can occur if the source
4937 code contains an out-of-bounds access to a small array. */
4938 if (!MEM_P (to_rtx)
4939 && GET_MODE (to_rtx) != BLKmode
4940 && (unsigned HOST_WIDE_INT) bitpos
4941 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4942 {
4943 expand_normal (from);
4944 result = NULL;
4945 }
4946 /* Handle expand_expr of a complex value returning a CONCAT. */
4947 else if (GET_CODE (to_rtx) == CONCAT)
4948 {
4949 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4950 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4951 && bitpos == 0
4952 && bitsize == mode_bitsize)
4953 result = store_expr (from, to_rtx, false, nontemporal);
4954 else if (bitsize == mode_bitsize / 2
4955 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4956 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4957 nontemporal);
4958 else if (bitpos + bitsize <= mode_bitsize / 2)
4959 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4960 bitregion_start, bitregion_end,
4961 mode1, from,
4962 get_alias_set (to), nontemporal);
4963 else if (bitpos >= mode_bitsize / 2)
4964 result = store_field (XEXP (to_rtx, 1), bitsize,
4965 bitpos - mode_bitsize / 2,
4966 bitregion_start, bitregion_end,
4967 mode1, from,
4968 get_alias_set (to), nontemporal);
4969 else if (bitpos == 0 && bitsize == mode_bitsize)
4970 {
4971 rtx from_rtx;
4972 result = expand_normal (from);
4973 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4974 TYPE_MODE (TREE_TYPE (from)), 0);
4975 emit_move_insn (XEXP (to_rtx, 0),
4976 read_complex_part (from_rtx, false));
4977 emit_move_insn (XEXP (to_rtx, 1),
4978 read_complex_part (from_rtx, true));
4979 }
4980 else
4981 {
4982 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4983 GET_MODE_SIZE (GET_MODE (to_rtx)));
4984 write_complex_part (temp, XEXP (to_rtx, 0), false);
4985 write_complex_part (temp, XEXP (to_rtx, 1), true);
4986 result = store_field (temp, bitsize, bitpos,
4987 bitregion_start, bitregion_end,
4988 mode1, from,
4989 get_alias_set (to), nontemporal);
4990 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4991 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4992 }
4993 }
4994 else
4995 {
4996 if (MEM_P (to_rtx))
4997 {
4998 /* If the field is at offset zero, we could have been given the
4999 DECL_RTX of the parent struct. Don't munge it. */
5000 to_rtx = shallow_copy_rtx (to_rtx);
5001 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5002 if (volatilep)
5003 MEM_VOLATILE_P (to_rtx) = 1;
5004 }
5005
5006 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5007 bitregion_start, bitregion_end,
5008 mode1,
5009 to_rtx, to, from))
5010 result = NULL;
5011 else
5012 result = store_field (to_rtx, bitsize, bitpos,
5013 bitregion_start, bitregion_end,
5014 mode1, from,
5015 get_alias_set (to), nontemporal);
5016 }
5017
5018 if (result)
5019 preserve_temp_slots (result);
5020 pop_temp_slots ();
5021 return;
5022 }
5023
5024 /* If the rhs is a function call and its value is not an aggregate,
5025 call the function before we start to compute the lhs.
5026 This is needed for correct code for cases such as
5027 val = setjmp (buf) on machines where reference to val
5028 requires loading up part of an address in a separate insn.
5029
5030 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5031 since it might be a promoted variable where the zero- or sign- extension
5032 needs to be done. Handling this in the normal way is safe because no
5033 computation is done before the call. The same is true for SSA names. */
5034 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5035 && COMPLETE_TYPE_P (TREE_TYPE (from))
5036 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5037 && ! (((TREE_CODE (to) == VAR_DECL
5038 || TREE_CODE (to) == PARM_DECL
5039 || TREE_CODE (to) == RESULT_DECL)
5040 && REG_P (DECL_RTL (to)))
5041 || TREE_CODE (to) == SSA_NAME))
5042 {
5043 rtx value;
5044 rtx bounds;
5045
5046 push_temp_slots ();
5047 value = expand_normal (from);
5048
5049 /* Split value and bounds to store them separately. */
5050 chkp_split_slot (value, &value, &bounds);
5051
5052 if (to_rtx == 0)
5053 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5054
5055 /* Handle calls that return values in multiple non-contiguous locations.
5056 The Irix 6 ABI has examples of this. */
5057 if (GET_CODE (to_rtx) == PARALLEL)
5058 {
5059 if (GET_CODE (value) == PARALLEL)
5060 emit_group_move (to_rtx, value);
5061 else
5062 emit_group_load (to_rtx, value, TREE_TYPE (from),
5063 int_size_in_bytes (TREE_TYPE (from)));
5064 }
5065 else if (GET_CODE (value) == PARALLEL)
5066 emit_group_store (to_rtx, value, TREE_TYPE (from),
5067 int_size_in_bytes (TREE_TYPE (from)));
5068 else if (GET_MODE (to_rtx) == BLKmode)
5069 {
5070 /* Handle calls that return BLKmode values in registers. */
5071 if (REG_P (value))
5072 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5073 else
5074 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5075 }
5076 else
5077 {
5078 if (POINTER_TYPE_P (TREE_TYPE (to)))
5079 value = convert_memory_address_addr_space
5080 (GET_MODE (to_rtx), value,
5081 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5082
5083 emit_move_insn (to_rtx, value);
5084 }
5085
5086 /* Store bounds if required. */
5087 if (bounds
5088 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5089 {
5090 gcc_assert (MEM_P (to_rtx));
5091 chkp_emit_bounds_store (bounds, value, to_rtx);
5092 }
5093
5094 preserve_temp_slots (to_rtx);
5095 pop_temp_slots ();
5096 return;
5097 }
5098
5099 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5100 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5101
5102 /* Don't move directly into a return register. */
5103 if (TREE_CODE (to) == RESULT_DECL
5104 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5105 {
5106 rtx temp;
5107
5108 push_temp_slots ();
5109
5110 /* If the source is itself a return value, it still is in a pseudo at
5111 this point so we can move it back to the return register directly. */
5112 if (REG_P (to_rtx)
5113 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5114 && TREE_CODE (from) != CALL_EXPR)
5115 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5116 else
5117 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5118
5119 /* Handle calls that return values in multiple non-contiguous locations.
5120 The Irix 6 ABI has examples of this. */
5121 if (GET_CODE (to_rtx) == PARALLEL)
5122 {
5123 if (GET_CODE (temp) == PARALLEL)
5124 emit_group_move (to_rtx, temp);
5125 else
5126 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5127 int_size_in_bytes (TREE_TYPE (from)));
5128 }
5129 else if (temp)
5130 emit_move_insn (to_rtx, temp);
5131
5132 preserve_temp_slots (to_rtx);
5133 pop_temp_slots ();
5134 return;
5135 }
5136
5137 /* In case we are returning the contents of an object which overlaps
5138 the place the value is being stored, use a safe function when copying
5139 a value through a pointer into a structure value return block. */
5140 if (TREE_CODE (to) == RESULT_DECL
5141 && TREE_CODE (from) == INDIRECT_REF
5142 && ADDR_SPACE_GENERIC_P
5143 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5144 && refs_may_alias_p (to, from)
5145 && cfun->returns_struct
5146 && !cfun->returns_pcc_struct)
5147 {
5148 rtx from_rtx, size;
5149
5150 push_temp_slots ();
5151 size = expr_size (from);
5152 from_rtx = expand_normal (from);
5153
5154 emit_library_call (memmove_libfunc, LCT_NORMAL,
5155 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5156 XEXP (from_rtx, 0), Pmode,
5157 convert_to_mode (TYPE_MODE (sizetype),
5158 size, TYPE_UNSIGNED (sizetype)),
5159 TYPE_MODE (sizetype));
5160
5161 preserve_temp_slots (to_rtx);
5162 pop_temp_slots ();
5163 return;
5164 }
5165
5166 /* Compute FROM and store the value in the rtx we got. */
5167
5168 push_temp_slots ();
5169 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5170 preserve_temp_slots (result);
5171 pop_temp_slots ();
5172 return;
5173 }
5174
5175 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5176 succeeded, false otherwise. */
5177
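/* Note: this only succeeds when the target provides a storent<mode> named
   pattern for MODE; the caller requesting a nontemporal store is expected
   to fall back to an ordinary move when false is returned.  */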
5178 bool
5179 emit_storent_insn (rtx to, rtx from)
5180 {
5181 struct expand_operand ops[2];
5182 machine_mode mode = GET_MODE (to);
5183 enum insn_code code = optab_handler (storent_optab, mode);
5184
5185 if (code == CODE_FOR_nothing)
5186 return false;
5187
5188 create_fixed_operand (&ops[0], to);
5189 create_input_operand (&ops[1], from, mode);
5190 return maybe_expand_insn (code, 2, ops);
5191 }
5192
5193 /* Generate code for computing expression EXP,
5194 and storing the value into TARGET.
5195
5196 If the mode is BLKmode then we may return TARGET itself.
5197 It turns out that in BLKmode it doesn't cause a problem,
5198 because C has no operators that could combine two different
5199 assignments into the same BLKmode object with different values
5200 with no sequence point. Will other languages need this to
5201 be more thorough?
5202
5203 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5204 stack, and block moves may need to be treated specially.
5205
5206 If NONTEMPORAL is true, try using a nontemporal store instruction.
5207
5208 If BTARGET is not NULL then computed bounds of EXP are
5209 associated with BTARGET. */
5210
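/* Note: the ordinary-assignment path in expand_assignment above reaches
   this function as store_expr_with_bounds (from, to_rtx, 0, nontemporal, to),
   while the plain store_expr entry point is expected to forward here with
   a NULL BTARGET.  */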
5211 rtx
5212 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5213 bool nontemporal, tree btarget)
5214 {
5215 rtx temp;
5216 rtx alt_rtl = NULL_RTX;
5217 location_t loc = curr_insn_location ();
5218
5219 if (VOID_TYPE_P (TREE_TYPE (exp)))
5220 {
5221 /* C++ can generate ?: expressions with a throw expression in one
5222 branch and an rvalue in the other. Here, we resolve attempts to
5223 store the throw expression's nonexistent result. */
5224 gcc_assert (!call_param_p);
5225 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5226 return NULL_RTX;
5227 }
5228 if (TREE_CODE (exp) == COMPOUND_EXPR)
5229 {
5230 /* Perform first part of compound expression, then assign from second
5231 part. */
5232 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5233 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5234 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5235 call_param_p, nontemporal, btarget);
5236 }
5237 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5238 {
5239 /* For conditional expression, get safe form of the target. Then
5240 test the condition, doing the appropriate assignment on either
5241 side. This avoids the creation of unnecessary temporaries.
5242 For non-BLKmode, it is more efficient not to do this. */
5243
5244 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5245
5246 do_pending_stack_adjust ();
5247 NO_DEFER_POP;
5248 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5249 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5250 nontemporal, btarget);
5251 emit_jump_insn (targetm.gen_jump (lab2));
5252 emit_barrier ();
5253 emit_label (lab1);
5254 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5255 nontemporal, btarget);
5256 emit_label (lab2);
5257 OK_DEFER_POP;
5258
5259 return NULL_RTX;
5260 }
5261 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5262 /* If this is a scalar in a register that is stored in a wider mode
5263 than the declared mode, compute the result into its declared mode
5264 and then convert to the wider mode. Our value is the computed
5265 expression. */
5266 {
5267 rtx inner_target = 0;
5268
5269 /* We can do the conversion inside EXP, which will often result
5270 in some optimizations. Do the conversion in two steps: first
5271 change the signedness, if needed, then the extend. But don't
5272 do this if the type of EXP is a subtype of something else
5273 since then the conversion might involve more than just
5274 converting modes. */
5275 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5276 && TREE_TYPE (TREE_TYPE (exp)) == 0
5277 && GET_MODE_PRECISION (GET_MODE (target))
5278 == TYPE_PRECISION (TREE_TYPE (exp)))
5279 {
5280 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5281 TYPE_UNSIGNED (TREE_TYPE (exp))))
5282 {
5283 /* Some types, e.g. Fortran's logical*4, won't have a signed
5284 version, so use the mode instead. */
5285 tree ntype
5286 = (signed_or_unsigned_type_for
5287 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5288 if (ntype == NULL)
5289 ntype = lang_hooks.types.type_for_mode
5290 (TYPE_MODE (TREE_TYPE (exp)),
5291 SUBREG_PROMOTED_SIGN (target));
5292
5293 exp = fold_convert_loc (loc, ntype, exp);
5294 }
5295
5296 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5297 (GET_MODE (SUBREG_REG (target)),
5298 SUBREG_PROMOTED_SIGN (target)),
5299 exp);
5300
5301 inner_target = SUBREG_REG (target);
5302 }
5303
5304 temp = expand_expr (exp, inner_target, VOIDmode,
5305 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5306
5307 /* Handle bounds returned by call. */
5308 if (TREE_CODE (exp) == CALL_EXPR)
5309 {
5310 rtx bounds;
5311 chkp_split_slot (temp, &temp, &bounds);
5312 if (bounds && btarget)
5313 {
5314 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5315 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5316 chkp_set_rtl_bounds (btarget, tmp);
5317 }
5318 }
5319
5320 /* If TEMP is a VOIDmode constant, use convert_modes to make
5321 sure that we properly convert it. */
5322 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5323 {
5324 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5325 temp, SUBREG_PROMOTED_SIGN (target));
5326 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5327 GET_MODE (target), temp,
5328 SUBREG_PROMOTED_SIGN (target));
5329 }
5330
5331 convert_move (SUBREG_REG (target), temp,
5332 SUBREG_PROMOTED_SIGN (target));
5333
5334 return NULL_RTX;
5335 }
5336 else if ((TREE_CODE (exp) == STRING_CST
5337 || (TREE_CODE (exp) == MEM_REF
5338 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5339 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5340 == STRING_CST
5341 && integer_zerop (TREE_OPERAND (exp, 1))))
5342 && !nontemporal && !call_param_p
5343 && MEM_P (target))
5344 {
5345 /* Optimize initialization of an array with a STRING_CST. */
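/* For instance (illustrative only), an initializer such as
       char buf[16] = "abc";
   can be emitted as a store of the string bytes followed by a clear of
   the remaining bytes of BUF, instead of a general block copy.  */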
5346 HOST_WIDE_INT exp_len, str_copy_len;
5347 rtx dest_mem;
5348 tree str = TREE_CODE (exp) == STRING_CST
5349 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5350
5351 exp_len = int_expr_size (exp);
5352 if (exp_len <= 0)
5353 goto normal_expr;
5354
5355 if (TREE_STRING_LENGTH (str) <= 0)
5356 goto normal_expr;
5357
5358 str_copy_len = strlen (TREE_STRING_POINTER (str));
5359 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5360 goto normal_expr;
5361
5362 str_copy_len = TREE_STRING_LENGTH (str);
5363 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5364 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5365 {
5366 str_copy_len += STORE_MAX_PIECES - 1;
5367 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5368 }
5369 str_copy_len = MIN (str_copy_len, exp_len);
5370 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5371 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5372 MEM_ALIGN (target), false))
5373 goto normal_expr;
5374
5375 dest_mem = target;
5376
5377 dest_mem = store_by_pieces (dest_mem,
5378 str_copy_len, builtin_strncpy_read_str,
5379 CONST_CAST (char *,
5380 TREE_STRING_POINTER (str)),
5381 MEM_ALIGN (target), false,
5382 exp_len > str_copy_len ? 1 : 0);
5383 if (exp_len > str_copy_len)
5384 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5385 GEN_INT (exp_len - str_copy_len),
5386 BLOCK_OP_NORMAL);
5387 return NULL_RTX;
5388 }
5389 else
5390 {
5391 rtx tmp_target;
5392
5393 normal_expr:
5394 /* If we want to use a nontemporal store, force the value into
5395 a register first. */
5396 tmp_target = nontemporal ? NULL_RTX : target;
5397 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5398 (call_param_p
5399 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5400 &alt_rtl, false);
5401
5402 /* Handle bounds returned by call. */
5403 if (TREE_CODE (exp) == CALL_EXPR)
5404 {
5405 rtx bounds;
5406 chkp_split_slot (temp, &temp, &bounds);
5407 if (bounds && btarget)
5408 {
5409 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5410 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5411 chkp_set_rtl_bounds (btarget, tmp);
5412 }
5413 }
5414 }
5415
5416 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5417 the same as that of TARGET, adjust the constant. This is needed, for
5418 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5419 only a word-sized value. */
5420 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5421 && TREE_CODE (exp) != ERROR_MARK
5422 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5423 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5424 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5425
5426 /* If value was not generated in the target, store it there.
5427 Convert the value to TARGET's type first if necessary and emit the
5428 pending incrementations that have been queued when expanding EXP.
5429 Note that we cannot emit the whole queue blindly because this will
5430 effectively disable the POST_INC optimization later.
5431
5432 If TEMP and TARGET compare equal according to rtx_equal_p, but
5433 one or both of them are volatile memory refs, we have to distinguish
5434 two cases:
5435 - expand_expr has used TARGET. In this case, we must not generate
5436 another copy. This can be detected by TARGET being equal according
5437 to == .
5438 - expand_expr has not used TARGET - that means that the source just
5439 happens to have the same RTX form. Since temp will have been created
5440 by expand_expr, it will compare unequal according to == .
5441 We must generate a copy in this case, to reach the correct number
5442 of volatile memory references. */
5443
5444 if ((! rtx_equal_p (temp, target)
5445 || (temp != target && (side_effects_p (temp)
5446 || side_effects_p (target))))
5447 && TREE_CODE (exp) != ERROR_MARK
5448 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5449 but TARGET is not a valid memory reference, TEMP will differ
5450 from TARGET although it is really the same location. */
5451 && !(alt_rtl
5452 && rtx_equal_p (alt_rtl, target)
5453 && !side_effects_p (alt_rtl)
5454 && !side_effects_p (target))
5455 /* If there's nothing to copy, don't bother. Don't call
5456 expr_size unless necessary, because some front ends' (e.g. C++)
5457 expr_size hook must not be given objects that are not
5458 supposed to be bit-copied or bit-initialized. */
5459 && expr_size (exp) != const0_rtx)
5460 {
5461 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5462 {
5463 if (GET_MODE (target) == BLKmode)
5464 {
5465 /* Handle calls that return BLKmode values in registers. */
5466 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5467 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5468 else
5469 store_bit_field (target,
5470 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5471 0, 0, 0, GET_MODE (temp), temp);
5472 }
5473 else
5474 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5475 }
5476
5477 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5478 {
5479 /* Handle copying a string constant into an array. The string
5480 constant may be shorter than the array. So copy just the string's
5481 actual length, and clear the rest. First get the size of the data
5482 type of the string, which is actually the size of the target. */
5483 rtx size = expr_size (exp);
5484
5485 if (CONST_INT_P (size)
5486 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5487 emit_block_move (target, temp, size,
5488 (call_param_p
5489 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5490 else
5491 {
5492 machine_mode pointer_mode
5493 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5494 machine_mode address_mode = get_address_mode (target);
5495
5496 /* Compute the size of the data to copy from the string. */
5497 tree copy_size
5498 = size_binop_loc (loc, MIN_EXPR,
5499 make_tree (sizetype, size),
5500 size_int (TREE_STRING_LENGTH (exp)));
5501 rtx copy_size_rtx
5502 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5503 (call_param_p
5504 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5505 rtx_code_label *label = 0;
5506
5507 /* Copy that much. */
5508 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5509 TYPE_UNSIGNED (sizetype));
5510 emit_block_move (target, temp, copy_size_rtx,
5511 (call_param_p
5512 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5513
5514 /* Figure out how much is left in TARGET that we have to clear.
5515 Do all calculations in pointer_mode. */
5516 if (CONST_INT_P (copy_size_rtx))
5517 {
5518 size = plus_constant (address_mode, size,
5519 -INTVAL (copy_size_rtx));
5520 target = adjust_address (target, BLKmode,
5521 INTVAL (copy_size_rtx));
5522 }
5523 else
5524 {
5525 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5526 copy_size_rtx, NULL_RTX, 0,
5527 OPTAB_LIB_WIDEN);
5528
5529 if (GET_MODE (copy_size_rtx) != address_mode)
5530 copy_size_rtx = convert_to_mode (address_mode,
5531 copy_size_rtx,
5532 TYPE_UNSIGNED (sizetype));
5533
5534 target = offset_address (target, copy_size_rtx,
5535 highest_pow2_factor (copy_size));
5536 label = gen_label_rtx ();
5537 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5538 GET_MODE (size), 0, label);
5539 }
5540
5541 if (size != const0_rtx)
5542 clear_storage (target, size, BLOCK_OP_NORMAL);
5543
5544 if (label)
5545 emit_label (label);
5546 }
5547 }
5548 /* Handle calls that return values in multiple non-contiguous locations.
5549 The Irix 6 ABI has examples of this. */
5550 else if (GET_CODE (target) == PARALLEL)
5551 {
5552 if (GET_CODE (temp) == PARALLEL)
5553 emit_group_move (target, temp);
5554 else
5555 emit_group_load (target, temp, TREE_TYPE (exp),
5556 int_size_in_bytes (TREE_TYPE (exp)));
5557 }
5558 else if (GET_CODE (temp) == PARALLEL)
5559 emit_group_store (target, temp, TREE_TYPE (exp),
5560 int_size_in_bytes (TREE_TYPE (exp)));
5561 else if (GET_MODE (temp) == BLKmode)
5562 emit_block_move (target, temp, expr_size (exp),
5563 (call_param_p
5564 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5565 /* If we emit a nontemporal store, there is nothing else to do. */
5566 else if (nontemporal && emit_storent_insn (target, temp))
5567 ;
5568 else
5569 {
5570 temp = force_operand (temp, target);
5571 if (temp != target)
5572 emit_move_insn (target, temp);
5573 }
5574 }
5575
5576 return NULL_RTX;
5577 }
5578
5579 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5580 rtx
5581 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5582 {
5583 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5584 }
5585 \f
5586 /* Return true if field F of structure TYPE is a flexible array. */
5587
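/* For example (illustrative only), DATA is a flexible array member in
       struct packet { int len; char data[]; };  */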
5588 static bool
5589 flexible_array_member_p (const_tree f, const_tree type)
5590 {
5591 const_tree tf;
5592
5593 tf = TREE_TYPE (f);
5594 return (DECL_CHAIN (f) == NULL
5595 && TREE_CODE (tf) == ARRAY_TYPE
5596 && TYPE_DOMAIN (tf)
5597 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5598 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5599 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5600 && int_size_in_bytes (type) >= 0);
5601 }
5602
5603 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5604 must have in order for it to completely initialize a value of type TYPE.
5605 Return -1 if the number isn't known.
5606
5607 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5608
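/* For example (illustrative only), for
       struct { int a; int b[3]; }
   the result is 2 when FOR_CTOR_P (two top-level fields) and 4 otherwise
   (an estimate of the scalars: A plus the three elements of B).  */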
5609 static HOST_WIDE_INT
5610 count_type_elements (const_tree type, bool for_ctor_p)
5611 {
5612 switch (TREE_CODE (type))
5613 {
5614 case ARRAY_TYPE:
5615 {
5616 tree nelts;
5617
5618 nelts = array_type_nelts (type);
5619 if (nelts && tree_fits_uhwi_p (nelts))
5620 {
5621 unsigned HOST_WIDE_INT n;
5622
5623 n = tree_to_uhwi (nelts) + 1;
5624 if (n == 0 || for_ctor_p)
5625 return n;
5626 else
5627 return n * count_type_elements (TREE_TYPE (type), false);
5628 }
5629 return for_ctor_p ? -1 : 1;
5630 }
5631
5632 case RECORD_TYPE:
5633 {
5634 unsigned HOST_WIDE_INT n;
5635 tree f;
5636
5637 n = 0;
5638 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5639 if (TREE_CODE (f) == FIELD_DECL)
5640 {
5641 if (!for_ctor_p)
5642 n += count_type_elements (TREE_TYPE (f), false);
5643 else if (!flexible_array_member_p (f, type))
5644 /* Don't count flexible arrays, which are not supposed
5645 to be initialized. */
5646 n += 1;
5647 }
5648
5649 return n;
5650 }
5651
5652 case UNION_TYPE:
5653 case QUAL_UNION_TYPE:
5654 {
5655 tree f;
5656 HOST_WIDE_INT n, m;
5657
5658 gcc_assert (!for_ctor_p);
5659 /* Estimate the number of scalars in each field and pick the
5660 maximum. Other estimates would do instead; the idea is simply
5661 to make sure that the estimate is not sensitive to the ordering
5662 of the fields. */
5663 n = 1;
5664 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5665 if (TREE_CODE (f) == FIELD_DECL)
5666 {
5667 m = count_type_elements (TREE_TYPE (f), false);
5668 /* If the field doesn't span the whole union, add an extra
5669 scalar for the rest. */
5670 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5671 TYPE_SIZE (type)) != 1)
5672 m++;
5673 if (n < m)
5674 n = m;
5675 }
5676 return n;
5677 }
5678
5679 case COMPLEX_TYPE:
5680 return 2;
5681
5682 case VECTOR_TYPE:
5683 return TYPE_VECTOR_SUBPARTS (type);
5684
5685 case INTEGER_TYPE:
5686 case REAL_TYPE:
5687 case FIXED_POINT_TYPE:
5688 case ENUMERAL_TYPE:
5689 case BOOLEAN_TYPE:
5690 case POINTER_TYPE:
5691 case OFFSET_TYPE:
5692 case REFERENCE_TYPE:
5693 case NULLPTR_TYPE:
5694 return 1;
5695
5696 case ERROR_MARK:
5697 return 0;
5698
5699 case VOID_TYPE:
5700 case METHOD_TYPE:
5701 case FUNCTION_TYPE:
5702 case LANG_TYPE:
5703 default:
5704 gcc_unreachable ();
5705 }
5706 }
5707
5708 /* Helper for categorize_ctor_elements. Identical interface. */
5709
5710 static bool
5711 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5712 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5713 {
5714 unsigned HOST_WIDE_INT idx;
5715 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5716 tree value, purpose, elt_type;
5717
5718 /* Whether CTOR is a valid constant initializer, in accordance with what
5719 initializer_constant_valid_p does. If inferred from the constructor
5720 elements, true until proven otherwise. */
5721 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5722 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5723
5724 nz_elts = 0;
5725 init_elts = 0;
5726 num_fields = 0;
5727 elt_type = NULL_TREE;
5728
5729 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5730 {
5731 HOST_WIDE_INT mult = 1;
5732
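/* A RANGE_EXPR index comes from a GNU C designated range initializer,
   e.g. (illustrative only)
       int a[16] = { [4 ... 7] = 1 };
   in which case the single value counts MULT (here 4) times.  */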
5733 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5734 {
5735 tree lo_index = TREE_OPERAND (purpose, 0);
5736 tree hi_index = TREE_OPERAND (purpose, 1);
5737
5738 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5739 mult = (tree_to_uhwi (hi_index)
5740 - tree_to_uhwi (lo_index) + 1);
5741 }
5742 num_fields += mult;
5743 elt_type = TREE_TYPE (value);
5744
5745 switch (TREE_CODE (value))
5746 {
5747 case CONSTRUCTOR:
5748 {
5749 HOST_WIDE_INT nz = 0, ic = 0;
5750
5751 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5752 p_complete);
5753
5754 nz_elts += mult * nz;
5755 init_elts += mult * ic;
5756
5757 if (const_from_elts_p && const_p)
5758 const_p = const_elt_p;
5759 }
5760 break;
5761
5762 case INTEGER_CST:
5763 case REAL_CST:
5764 case FIXED_CST:
5765 if (!initializer_zerop (value))
5766 nz_elts += mult;
5767 init_elts += mult;
5768 break;
5769
5770 case STRING_CST:
5771 nz_elts += mult * TREE_STRING_LENGTH (value);
5772 init_elts += mult * TREE_STRING_LENGTH (value);
5773 break;
5774
5775 case COMPLEX_CST:
5776 if (!initializer_zerop (TREE_REALPART (value)))
5777 nz_elts += mult;
5778 if (!initializer_zerop (TREE_IMAGPART (value)))
5779 nz_elts += mult;
5780 init_elts += mult;
5781 break;
5782
5783 case VECTOR_CST:
5784 {
5785 unsigned i;
5786 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5787 {
5788 tree v = VECTOR_CST_ELT (value, i);
5789 if (!initializer_zerop (v))
5790 nz_elts += mult;
5791 init_elts += mult;
5792 }
5793 }
5794 break;
5795
5796 default:
5797 {
5798 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5799 nz_elts += mult * tc;
5800 init_elts += mult * tc;
5801
5802 if (const_from_elts_p && const_p)
5803 const_p = initializer_constant_valid_p (value, elt_type)
5804 != NULL_TREE;
5805 }
5806 break;
5807 }
5808 }
5809
5810 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5811 num_fields, elt_type))
5812 *p_complete = false;
5813
5814 *p_nz_elts += nz_elts;
5815 *p_init_elts += init_elts;
5816
5817 return const_p;
5818 }
5819
5820 /* Examine CTOR to discover:
5821 * how many scalar fields are set to nonzero values,
5822 and place it in *P_NZ_ELTS;
5823 * how many scalar fields in total are in CTOR,
5824 and place it in *P_INIT_ELTS.
5825 * whether the constructor is complete -- in the sense that every
5826 meaningful byte is explicitly given a value --
5827 and place it in *P_COMPLETE.
5828
5829 Return whether or not CTOR is a valid static constant initializer, the same
5830 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5831
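/* For example (illustrative only), for the constructor of
       int v[4] = { 0, 5, 0, 7 };
   *P_NZ_ELTS is 2, *P_INIT_ELTS is 4, *P_COMPLETE remains true, and true
   is returned since every element is a valid static initializer.  */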
5832 bool
5833 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5834 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5835 {
5836 *p_nz_elts = 0;
5837 *p_init_elts = 0;
5838 *p_complete = true;
5839
5840 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5841 }
5842
5843 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5844 of which had type LAST_TYPE. Each element was itself a complete
5845 initializer, in the sense that every meaningful byte was explicitly
5846 given a value. Return true if the same is true for the constructor
5847 as a whole. */
5848
5849 bool
5850 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5851 const_tree last_type)
5852 {
5853 if (TREE_CODE (type) == UNION_TYPE
5854 || TREE_CODE (type) == QUAL_UNION_TYPE)
5855 {
5856 if (num_elts == 0)
5857 return false;
5858
5859 gcc_assert (num_elts == 1 && last_type);
5860
5861 /* ??? We could look at each element of the union, and find the
5862 largest element, which would avoid comparing the size of the
5863 initialized element against any tail padding in the union.
5864 Doesn't seem worth the effort... */
5865 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5866 }
5867
5868 return count_type_elements (type, true) == num_elts;
5869 }
5870
5871 /* Return 1 if EXP contains mostly (3/4) zeros. */
5872
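/* For instance (illustrative only), int a[8] = { [1] = 5 } counts as
   mostly zeros: its constructor is incomplete, so most elements are
   implicitly zero.  */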
5873 static int
5874 mostly_zeros_p (const_tree exp)
5875 {
5876 if (TREE_CODE (exp) == CONSTRUCTOR)
5877 {
5878 HOST_WIDE_INT nz_elts, init_elts;
5879 bool complete_p;
5880
5881 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5882 return !complete_p || nz_elts < init_elts / 4;
5883 }
5884
5885 return initializer_zerop (exp);
5886 }
5887
5888 /* Return 1 if EXP contains all zeros. */
5889
5890 static int
5891 all_zeros_p (const_tree exp)
5892 {
5893 if (TREE_CODE (exp) == CONSTRUCTOR)
5894 {
5895 HOST_WIDE_INT nz_elts, init_elts;
5896 bool complete_p;
5897
5898 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5899 return nz_elts == 0;
5900 }
5901
5902 return initializer_zerop (exp);
5903 }
5904 \f
5905 /* Helper function for store_constructor.
5906 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5907 CLEARED is as for store_constructor.
5908 ALIAS_SET is the alias set to use for any stores.
5909
5910 This provides a recursive shortcut back to store_constructor when it isn't
5911 necessary to go through store_field. This is so that we can pass through
5912 the cleared field to let store_constructor know that we may not have to
5913 clear a substructure if the outer structure has already been cleared. */
5914
5915 static void
5916 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5917 HOST_WIDE_INT bitpos, machine_mode mode,
5918 tree exp, int cleared, alias_set_type alias_set)
5919 {
5920 if (TREE_CODE (exp) == CONSTRUCTOR
5921 /* We can only call store_constructor recursively if the size and
5922 bit position are on a byte boundary. */
5923 && bitpos % BITS_PER_UNIT == 0
5924 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5925 /* If we have a nonzero bitpos for a register target, then we just
5926 let store_field do the bitfield handling. This is unlikely to
5927 generate unnecessary clear instructions anyway. */
5928 && (bitpos == 0 || MEM_P (target)))
5929 {
5930 if (MEM_P (target))
5931 target
5932 = adjust_address (target,
5933 GET_MODE (target) == BLKmode
5934 || 0 != (bitpos
5935 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5936 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5937
5938
5939 /* Update the alias set, if required. */
5940 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5941 && MEM_ALIAS_SET (target) != 0)
5942 {
5943 target = copy_rtx (target);
5944 set_mem_alias_set (target, alias_set);
5945 }
5946
5947 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5948 }
5949 else
5950 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5951 }
5952
5953
5954 /* Returns the number of FIELD_DECLs in TYPE. */
5955
5956 static int
5957 fields_length (const_tree type)
5958 {
5959 tree t = TYPE_FIELDS (type);
5960 int count = 0;
5961
5962 for (; t; t = DECL_CHAIN (t))
5963 if (TREE_CODE (t) == FIELD_DECL)
5964 ++count;
5965
5966 return count;
5967 }
5968
5969
5970 /* Store the value of constructor EXP into the rtx TARGET.
5971 TARGET is either a REG or a MEM; we know it cannot conflict, since
5972 safe_from_p has been called.
5973 CLEARED is true if TARGET is known to have been zero'd.
5974 SIZE is the number of bytes of TARGET we are allowed to modify: this
5975 may not be the same as the size of EXP if we are assigning to a field
5976 which has been packed to exclude padding bits. */
5977
5978 static void
5979 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5980 {
5981 tree type = TREE_TYPE (exp);
5982 #ifdef WORD_REGISTER_OPERATIONS
5983 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5984 #endif
5985
5986 switch (TREE_CODE (type))
5987 {
5988 case RECORD_TYPE:
5989 case UNION_TYPE:
5990 case QUAL_UNION_TYPE:
5991 {
5992 unsigned HOST_WIDE_INT idx;
5993 tree field, value;
5994
5995 /* If size is zero or the target is already cleared, do nothing. */
5996 if (size == 0 || cleared)
5997 cleared = 1;
5998 /* We either clear the aggregate or indicate the value is dead. */
5999 else if ((TREE_CODE (type) == UNION_TYPE
6000 || TREE_CODE (type) == QUAL_UNION_TYPE)
6001 && ! CONSTRUCTOR_ELTS (exp))
6002 /* If the constructor is empty, clear the union. */
6003 {
6004 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6005 cleared = 1;
6006 }
6007
6008 /* If we are building a static constructor into a register,
6009 set the initial value to zero so we can fold the value into
6010 a constant. But if more than one register is involved,
6011 this probably loses. */
6012 else if (REG_P (target) && TREE_STATIC (exp)
6013 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6014 {
6015 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6016 cleared = 1;
6017 }
6018
6019 /* If the constructor has fewer fields than the structure or
6020 if we are initializing the structure to mostly zeros, clear
6021 the whole structure first. Don't do this if TARGET is a
6022 register whose mode size isn't equal to SIZE since
6023 clear_storage can't handle this case. */
6024 else if (size > 0
6025 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6026 != fields_length (type))
6027 || mostly_zeros_p (exp))
6028 && (!REG_P (target)
6029 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6030 == size)))
6031 {
6032 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6033 cleared = 1;
6034 }
6035
6036 if (REG_P (target) && !cleared)
6037 emit_clobber (target);
6038
6039 /* Store each element of the constructor into the
6040 corresponding field of TARGET. */
6041 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6042 {
6043 machine_mode mode;
6044 HOST_WIDE_INT bitsize;
6045 HOST_WIDE_INT bitpos = 0;
6046 tree offset;
6047 rtx to_rtx = target;
6048
6049 /* Just ignore missing fields. We cleared the whole
6050 structure, above, if any fields are missing. */
6051 if (field == 0)
6052 continue;
6053
6054 if (cleared && initializer_zerop (value))
6055 continue;
6056
6057 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6058 bitsize = tree_to_uhwi (DECL_SIZE (field));
6059 else
6060 bitsize = -1;
6061
6062 mode = DECL_MODE (field);
6063 if (DECL_BIT_FIELD (field))
6064 mode = VOIDmode;
6065
6066 offset = DECL_FIELD_OFFSET (field);
6067 if (tree_fits_shwi_p (offset)
6068 && tree_fits_shwi_p (bit_position (field)))
6069 {
6070 bitpos = int_bit_position (field);
6071 offset = 0;
6072 }
6073 else
6074 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6075
6076 if (offset)
6077 {
6078 machine_mode address_mode;
6079 rtx offset_rtx;
6080
6081 offset
6082 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6083 make_tree (TREE_TYPE (exp),
6084 target));
6085
6086 offset_rtx = expand_normal (offset);
6087 gcc_assert (MEM_P (to_rtx));
6088
6089 address_mode = get_address_mode (to_rtx);
6090 if (GET_MODE (offset_rtx) != address_mode)
6091 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6092
6093 to_rtx = offset_address (to_rtx, offset_rtx,
6094 highest_pow2_factor (offset));
6095 }
6096
6097 #ifdef WORD_REGISTER_OPERATIONS
6098 /* If this initializes a field that is smaller than a
6099 word, at the start of a word, try to widen it to a full
6100 word. This special case allows us to output C++ member
6101 function initializations in a form that the optimizers
6102 can understand. */
6103 if (REG_P (target)
6104 && bitsize < BITS_PER_WORD
6105 && bitpos % BITS_PER_WORD == 0
6106 && GET_MODE_CLASS (mode) == MODE_INT
6107 && TREE_CODE (value) == INTEGER_CST
6108 && exp_size >= 0
6109 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6110 {
6111 tree type = TREE_TYPE (value);
6112
6113 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6114 {
6115 type = lang_hooks.types.type_for_mode
6116 (word_mode, TYPE_UNSIGNED (type));
6117 value = fold_convert (type, value);
6118 }
6119
6120 if (BYTES_BIG_ENDIAN)
6121 value
6122 = fold_build2 (LSHIFT_EXPR, type, value,
6123 build_int_cst (type,
6124 BITS_PER_WORD - bitsize));
6125 bitsize = BITS_PER_WORD;
6126 mode = word_mode;
6127 }
6128 #endif
6129
6130 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6131 && DECL_NONADDRESSABLE_P (field))
6132 {
6133 to_rtx = copy_rtx (to_rtx);
6134 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6135 }
6136
6137 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6138 value, cleared,
6139 get_alias_set (TREE_TYPE (field)));
6140 }
6141 break;
6142 }
6143 case ARRAY_TYPE:
6144 {
6145 tree value, index;
6146 unsigned HOST_WIDE_INT i;
6147 int need_to_clear;
6148 tree domain;
6149 tree elttype = TREE_TYPE (type);
6150 int const_bounds_p;
6151 HOST_WIDE_INT minelt = 0;
6152 HOST_WIDE_INT maxelt = 0;
6153
6154 domain = TYPE_DOMAIN (type);
6155 const_bounds_p = (TYPE_MIN_VALUE (domain)
6156 && TYPE_MAX_VALUE (domain)
6157 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6158 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6159
6160 /* If we have constant bounds for the range of the type, get them. */
6161 if (const_bounds_p)
6162 {
6163 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6164 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6165 }
6166
6167 /* If the constructor has fewer elements than the array, clear
6168 the whole array first. Similarly if this is a static
6169 constructor of a non-BLKmode object. */
6170 if (cleared)
6171 need_to_clear = 0;
6172 else if (REG_P (target) && TREE_STATIC (exp))
6173 need_to_clear = 1;
6174 else
6175 {
6176 unsigned HOST_WIDE_INT idx;
6177 tree index, value;
6178 HOST_WIDE_INT count = 0, zero_count = 0;
6179 need_to_clear = ! const_bounds_p;
6180
6181 /* This loop is a more accurate version of the loop in
6182 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6183 is also needed to check for missing elements. */
6184 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6185 {
6186 HOST_WIDE_INT this_node_count;
6187
6188 if (need_to_clear)
6189 break;
6190
6191 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6192 {
6193 tree lo_index = TREE_OPERAND (index, 0);
6194 tree hi_index = TREE_OPERAND (index, 1);
6195
6196 if (! tree_fits_uhwi_p (lo_index)
6197 || ! tree_fits_uhwi_p (hi_index))
6198 {
6199 need_to_clear = 1;
6200 break;
6201 }
6202
6203 this_node_count = (tree_to_uhwi (hi_index)
6204 - tree_to_uhwi (lo_index) + 1);
6205 }
6206 else
6207 this_node_count = 1;
6208
6209 count += this_node_count;
6210 if (mostly_zeros_p (value))
6211 zero_count += this_node_count;
6212 }
6213
6214 /* Clear the entire array first if there are any missing
6215 elements, or if the incidence of zero elements is >=
6216 75%. */
6217 if (! need_to_clear
6218 && (count < maxelt - minelt + 1
6219 || 4 * zero_count >= 3 * count))
6220 need_to_clear = 1;
6221 }
6222
6223 if (need_to_clear && size > 0)
6224 {
6225 if (REG_P (target))
6226 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6227 else
6228 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6229 cleared = 1;
6230 }
6231
6232 if (!cleared && REG_P (target))
6233 /* Inform later passes that the old value is dead. */
6234 emit_clobber (target);
6235
6236 /* Store each element of the constructor into the
6237 corresponding element of TARGET, determined by counting the
6238 elements. */
6239 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6240 {
6241 machine_mode mode;
6242 HOST_WIDE_INT bitsize;
6243 HOST_WIDE_INT bitpos;
6244 rtx xtarget = target;
6245
6246 if (cleared && initializer_zerop (value))
6247 continue;
6248
6249 mode = TYPE_MODE (elttype);
6250 if (mode == BLKmode)
6251 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6252 ? tree_to_uhwi (TYPE_SIZE (elttype))
6253 : -1);
6254 else
6255 bitsize = GET_MODE_BITSIZE (mode);
6256
6257 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6258 {
6259 tree lo_index = TREE_OPERAND (index, 0);
6260 tree hi_index = TREE_OPERAND (index, 1);
6261 rtx index_r, pos_rtx;
6262 HOST_WIDE_INT lo, hi, count;
6263 tree position;
6264
6265 /* If the range is constant and "small", unroll the loop. */
6266 if (const_bounds_p
6267 && tree_fits_shwi_p (lo_index)
6268 && tree_fits_shwi_p (hi_index)
6269 && (lo = tree_to_shwi (lo_index),
6270 hi = tree_to_shwi (hi_index),
6271 count = hi - lo + 1,
6272 (!MEM_P (target)
6273 || count <= 2
6274 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6275 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6276 <= 40 * 8)))))
6277 {
6278 lo -= minelt; hi -= minelt;
6279 for (; lo <= hi; lo++)
6280 {
6281 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6282
6283 if (MEM_P (target)
6284 && !MEM_KEEP_ALIAS_SET_P (target)
6285 && TREE_CODE (type) == ARRAY_TYPE
6286 && TYPE_NONALIASED_COMPONENT (type))
6287 {
6288 target = copy_rtx (target);
6289 MEM_KEEP_ALIAS_SET_P (target) = 1;
6290 }
6291
6292 store_constructor_field
6293 (target, bitsize, bitpos, mode, value, cleared,
6294 get_alias_set (elttype));
6295 }
6296 }
6297 else
6298 {
6299 rtx_code_label *loop_start = gen_label_rtx ();
6300 rtx_code_label *loop_end = gen_label_rtx ();
6301 tree exit_cond;
6302
6303 expand_normal (hi_index);
6304
6305 index = build_decl (EXPR_LOCATION (exp),
6306 VAR_DECL, NULL_TREE, domain);
6307 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6308 SET_DECL_RTL (index, index_r);
6309 store_expr (lo_index, index_r, 0, false);
6310
6311 /* Build the head of the loop. */
6312 do_pending_stack_adjust ();
6313 emit_label (loop_start);
6314
6315 /* Assign value to element index. */
6316 position =
6317 fold_convert (ssizetype,
6318 fold_build2 (MINUS_EXPR,
6319 TREE_TYPE (index),
6320 index,
6321 TYPE_MIN_VALUE (domain)));
6322
6323 position =
6324 size_binop (MULT_EXPR, position,
6325 fold_convert (ssizetype,
6326 TYPE_SIZE_UNIT (elttype)));
6327
6328 pos_rtx = expand_normal (position);
6329 xtarget = offset_address (target, pos_rtx,
6330 highest_pow2_factor (position));
6331 xtarget = adjust_address (xtarget, mode, 0);
6332 if (TREE_CODE (value) == CONSTRUCTOR)
6333 store_constructor (value, xtarget, cleared,
6334 bitsize / BITS_PER_UNIT);
6335 else
6336 store_expr (value, xtarget, 0, false);
6337
6338 /* Generate a conditional jump to exit the loop. */
6339 exit_cond = build2 (LT_EXPR, integer_type_node,
6340 index, hi_index);
6341 jumpif (exit_cond, loop_end, -1);
6342
6343 /* Update the loop counter, and jump to the head of
6344 the loop. */
6345 expand_assignment (index,
6346 build2 (PLUS_EXPR, TREE_TYPE (index),
6347 index, integer_one_node),
6348 false);
6349
6350 emit_jump (loop_start);
6351
6352 /* Build the end of the loop. */
6353 emit_label (loop_end);
6354 }
6355 }
6356 else if ((index != 0 && ! tree_fits_shwi_p (index))
6357 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6358 {
6359 tree position;
6360
6361 if (index == 0)
6362 index = ssize_int (1);
6363
6364 if (minelt)
6365 index = fold_convert (ssizetype,
6366 fold_build2 (MINUS_EXPR,
6367 TREE_TYPE (index),
6368 index,
6369 TYPE_MIN_VALUE (domain)));
6370
6371 position =
6372 size_binop (MULT_EXPR, index,
6373 fold_convert (ssizetype,
6374 TYPE_SIZE_UNIT (elttype)));
6375 xtarget = offset_address (target,
6376 expand_normal (position),
6377 highest_pow2_factor (position));
6378 xtarget = adjust_address (xtarget, mode, 0);
6379 store_expr (value, xtarget, 0, false);
6380 }
6381 else
6382 {
6383 if (index != 0)
6384 bitpos = ((tree_to_shwi (index) - minelt)
6385 * tree_to_uhwi (TYPE_SIZE (elttype)));
6386 else
6387 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6388
6389 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6390 && TREE_CODE (type) == ARRAY_TYPE
6391 && TYPE_NONALIASED_COMPONENT (type))
6392 {
6393 target = copy_rtx (target);
6394 MEM_KEEP_ALIAS_SET_P (target) = 1;
6395 }
6396 store_constructor_field (target, bitsize, bitpos, mode, value,
6397 cleared, get_alias_set (elttype));
6398 }
6399 }
6400 break;
6401 }
6402
6403 case VECTOR_TYPE:
6404 {
6405 unsigned HOST_WIDE_INT idx;
6406 constructor_elt *ce;
6407 int i;
6408 int need_to_clear;
6409 int icode = CODE_FOR_nothing;
6410 tree elttype = TREE_TYPE (type);
6411 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6412 machine_mode eltmode = TYPE_MODE (elttype);
6413 HOST_WIDE_INT bitsize;
6414 HOST_WIDE_INT bitpos;
6415 rtvec vector = NULL;
6416 unsigned n_elts;
6417 alias_set_type alias;
6418
6419 gcc_assert (eltmode != BLKmode);
6420
6421 n_elts = TYPE_VECTOR_SUBPARTS (type);
6422 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6423 {
6424 machine_mode mode = GET_MODE (target);
6425
6426 icode = (int) optab_handler (vec_init_optab, mode);
6427 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6428 if (icode != CODE_FOR_nothing)
6429 {
6430 tree value;
6431
6432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6433 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6434 {
6435 icode = CODE_FOR_nothing;
6436 break;
6437 }
6438 }
6439 if (icode != CODE_FOR_nothing)
6440 {
6441 unsigned int i;
6442
6443 vector = rtvec_alloc (n_elts);
6444 for (i = 0; i < n_elts; i++)
6445 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6446 }
6447 }
6448
6449 /* If the constructor has fewer elements than the vector,
6450 clear the whole vector first. Similarly if this is a static
6451 constructor of a non-BLKmode object. */
6452 if (cleared)
6453 need_to_clear = 0;
6454 else if (REG_P (target) && TREE_STATIC (exp))
6455 need_to_clear = 1;
6456 else
6457 {
6458 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6459 tree value;
6460
6461 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6462 {
6463 int n_elts_here = tree_to_uhwi
6464 (int_const_binop (TRUNC_DIV_EXPR,
6465 TYPE_SIZE (TREE_TYPE (value)),
6466 TYPE_SIZE (elttype)));
6467
6468 count += n_elts_here;
6469 if (mostly_zeros_p (value))
6470 zero_count += n_elts_here;
6471 }
6472
6473 /* Clear the entire vector first if there are any missing elements,
6474 or if the incidence of zero elements is >= 75%. */
6475 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6476 }
6477
6478 if (need_to_clear && size > 0 && !vector)
6479 {
6480 if (REG_P (target))
6481 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6482 else
6483 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6484 cleared = 1;
6485 }
6486
6487 /* Inform later passes that the old value is dead. */
6488 if (!cleared && !vector && REG_P (target))
6489 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6490
6491 if (MEM_P (target))
6492 alias = MEM_ALIAS_SET (target);
6493 else
6494 alias = get_alias_set (elttype);
6495
6496 /* Store each element of the constructor into the corresponding
6497 element of TARGET, determined by counting the elements. */
6498 for (idx = 0, i = 0;
6499 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6500 idx++, i += bitsize / elt_size)
6501 {
6502 HOST_WIDE_INT eltpos;
6503 tree value = ce->value;
6504
6505 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6506 if (cleared && initializer_zerop (value))
6507 continue;
6508
6509 if (ce->index)
6510 eltpos = tree_to_uhwi (ce->index);
6511 else
6512 eltpos = i;
6513
6514 if (vector)
6515 {
6516 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6517 elements. */
6518 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6519 RTVEC_ELT (vector, eltpos)
6520 = expand_normal (value);
6521 }
6522 else
6523 {
6524 machine_mode value_mode =
6525 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6526 ? TYPE_MODE (TREE_TYPE (value))
6527 : eltmode;
6528 bitpos = eltpos * elt_size;
6529 store_constructor_field (target, bitsize, bitpos, value_mode,
6530 value, cleared, alias);
6531 }
6532 }
6533
6534 if (vector)
6535 emit_insn (GEN_FCN (icode)
6536 (target,
6537 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6538 break;
6539 }
6540
6541 default:
6542 gcc_unreachable ();
6543 }
6544 }
6545
6546 /* Store the value of EXP (an expression tree)
6547 into a subfield of TARGET which has mode MODE and occupies
6548 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6549 If MODE is VOIDmode, it means that we are storing into a bit-field.
6550
6551 BITREGION_START is the bitpos of the first bitfield in this region.
6552 BITREGION_END is the bitpos of the ending bitfield in this region.
6553 These two fields are 0 if the C++ memory model does not apply,
6554 or we are not interested in keeping track of bitfield regions.
6555
6556 Always return const0_rtx unless we have something particular to
6557 return.
6558
6559 ALIAS_SET is the alias set for the destination. This value will
6560 (in general) be different from that for TARGET, since TARGET is a
6561 reference to the containing structure.
6562
6563 If NONTEMPORAL is true, try generating a nontemporal store. */
6564
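/* Rough illustration (not taken from the sources): for
       struct { char a; int b : 7; int c : 9; char d; } s;
   a store to s.b under the C++ memory model is given a bit region
   covering only the adjacent bit-fields B and C, so the store must not
   touch A or D.  */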
6565 static rtx
6566 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6567 unsigned HOST_WIDE_INT bitregion_start,
6568 unsigned HOST_WIDE_INT bitregion_end,
6569 machine_mode mode, tree exp,
6570 alias_set_type alias_set, bool nontemporal)
6571 {
6572 if (TREE_CODE (exp) == ERROR_MARK)
6573 return const0_rtx;
6574
6575 /* If we have nothing to store, do nothing unless the expression has
6576 side-effects. */
6577 if (bitsize == 0)
6578 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6579
6580 if (GET_CODE (target) == CONCAT)
6581 {
6582 /* We're storing into a struct containing a single __complex. */
6583
6584 gcc_assert (!bitpos);
6585 return store_expr (exp, target, 0, nontemporal);
6586 }
6587
6588 /* If the structure is in a register or if the component
6589 is a bit field, we cannot use addressing to access it.
6590 Use bit-field techniques or SUBREG to store in it. */
6591
6592 if (mode == VOIDmode
6593 || (mode != BLKmode && ! direct_store[(int) mode]
6594 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6595 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6596 || REG_P (target)
6597 || GET_CODE (target) == SUBREG
6598 /* If the field isn't aligned enough to store as an ordinary memref,
6599 store it as a bit field. */
6600 || (mode != BLKmode
6601 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6602 || bitpos % GET_MODE_ALIGNMENT (mode))
6603 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6604 || (bitpos % BITS_PER_UNIT != 0)))
6605 || (bitsize >= 0 && mode != BLKmode
6606 && GET_MODE_BITSIZE (mode) > bitsize)
6607 /* If the RHS and field are a constant size and the size of the
6608 RHS isn't the same size as the bitfield, we must use bitfield
6609 operations. */
6610 || (bitsize >= 0
6611 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6612 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6613 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6614 decl we must use bitfield operations. */
6615 || (bitsize >= 0
6616 && TREE_CODE (exp) == MEM_REF
6617 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6618 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6619 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6620 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6621 {
6622 rtx temp;
6623 gimple nop_def;
6624
6625 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6626 implies a mask operation. If the precision is the same size as
6627 the field we're storing into, that mask is redundant. This is
6628 particularly common with bit field assignments generated by the
6629 C front end. */
6630 nop_def = get_def_for_expr (exp, NOP_EXPR);
6631 if (nop_def)
6632 {
6633 tree type = TREE_TYPE (exp);
6634 if (INTEGRAL_TYPE_P (type)
6635 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6636 && bitsize == TYPE_PRECISION (type))
6637 {
6638 tree op = gimple_assign_rhs1 (nop_def);
6639 type = TREE_TYPE (op);
6640 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6641 exp = op;
6642 }
6643 }
6644
6645 temp = expand_normal (exp);
6646
6647 /* If BITSIZE is narrower than the size of the type of EXP
6648 we will be narrowing TEMP. Normally, what's wanted are the
6649 low-order bits. However, if EXP's type is a record and this is a
6650 big-endian machine, we want the upper BITSIZE bits. */
6651 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6652 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6653 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6654 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6655 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6656 NULL_RTX, 1);
6657
6658 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6659 if (mode != VOIDmode && mode != BLKmode
6660 && mode != TYPE_MODE (TREE_TYPE (exp)))
6661 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6662
6663 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6664 are both BLKmode, both must be in memory and BITPOS must be aligned
6665 on a byte boundary. If so, we simply do a block copy. Likewise for
6666 a BLKmode-like TARGET. */
6667 if (GET_CODE (temp) != PARALLEL
6668 && GET_MODE (temp) == BLKmode
6669 && (GET_MODE (target) == BLKmode
6670 || (MEM_P (target)
6671 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6672 && (bitpos % BITS_PER_UNIT) == 0
6673 && (bitsize % BITS_PER_UNIT) == 0)))
6674 {
6675 gcc_assert (MEM_P (target) && MEM_P (temp)
6676 && (bitpos % BITS_PER_UNIT) == 0);
6677
6678 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6679 emit_block_move (target, temp,
6680 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6681 / BITS_PER_UNIT),
6682 BLOCK_OP_NORMAL);
6683
6684 return const0_rtx;
6685 }
6686
6687 /* Handle calls that return values in multiple non-contiguous locations.
6688 The Irix 6 ABI has examples of this. */
6689 if (GET_CODE (temp) == PARALLEL)
6690 {
6691 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6692 rtx temp_target;
6693 if (mode == BLKmode || mode == VOIDmode)
6694 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6695 temp_target = gen_reg_rtx (mode);
6696 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6697 temp = temp_target;
6698 }
6699 else if (mode == BLKmode)
6700 {
6701 /* Handle calls that return BLKmode values in registers. */
6702 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6703 {
6704 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6705 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6706 temp = temp_target;
6707 }
6708 else
6709 {
6710 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6711 rtx temp_target;
6712 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6713 temp_target = gen_reg_rtx (mode);
6714 temp_target
6715 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6716 temp_target, mode, mode);
6717 temp = temp_target;
6718 }
6719 }
6720
6721 /* Store the value in the bitfield. */
6722 store_bit_field (target, bitsize, bitpos,
6723 bitregion_start, bitregion_end,
6724 mode, temp);
6725
6726 return const0_rtx;
6727 }
6728 else
6729 {
6730 /* Now build a reference to just the desired component. */
6731 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6732
6733 if (to_rtx == target)
6734 to_rtx = copy_rtx (to_rtx);
6735
6736 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6737 set_mem_alias_set (to_rtx, alias_set);
6738
6739 return store_expr (exp, to_rtx, 0, nontemporal);
6740 }
6741 }
6742 \f
6743 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6744 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6745 codes and find the ultimate containing object, which we return.
6746
6747 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6748 bit position, and *PUNSIGNEDP to the signedness of the field.
6749 If the position of the field is variable, we store a tree
6750 giving the variable offset (in units) in *POFFSET.
6751 This offset is in addition to the bit position.
6752 If the position is not variable, we store 0 in *POFFSET.
6753
6754 If any of the extraction expressions is volatile,
6755 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6756
6757 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6758 Otherwise, it is a mode that can be used to access the field.
6759
6760 If the field describes a variable-sized object, *PMODE is set to
6761 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6762 this case, but the address of the object can be found.
6763
6764 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6765 look through nodes that serve as markers of a greater alignment than
6766 the one that can be deduced from the expression. These nodes make it
6767 possible for front-ends to prevent temporaries from being created by
6768 the middle-end on alignment considerations. For that purpose, the
6769 normal operating mode at high-level is to always pass FALSE so that
6770 the ultimate containing object is really returned; moreover, the
6771 associated predicate handled_component_p will always return TRUE
6772 on these nodes, thus indicating that they are essentially handled
6773 by get_inner_reference. TRUE should only be passed when the caller
6774 is scanning the expression in order to build another representation
6775 and specifically knows how to handle these nodes; as such, this is
6776 the normal operating mode in the RTL expanders. */
6777
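/* Rough illustration (not taken from the sources): for an expression like
       x.a[i].b
   with a non-constant I, the decl X is returned, the constant part of the
   position ends up in *PBITPOS, and the variable byte offset computed from
   I ends up in *POFFSET.  */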
6778 tree
6779 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6780 HOST_WIDE_INT *pbitpos, tree *poffset,
6781 machine_mode *pmode, int *punsignedp,
6782 int *pvolatilep, bool keep_aligning)
6783 {
6784 tree size_tree = 0;
6785 machine_mode mode = VOIDmode;
6786 bool blkmode_bitfield = false;
6787 tree offset = size_zero_node;
6788 offset_int bit_offset = 0;
6789
6790 /* First get the mode, signedness, and size. We do this from just the
6791 outermost expression. */
6792 *pbitsize = -1;
6793 if (TREE_CODE (exp) == COMPONENT_REF)
6794 {
6795 tree field = TREE_OPERAND (exp, 1);
6796 size_tree = DECL_SIZE (field);
6797 if (flag_strict_volatile_bitfields > 0
6798 && TREE_THIS_VOLATILE (exp)
6799 && DECL_BIT_FIELD_TYPE (field)
6800 && DECL_MODE (field) != BLKmode)
6801 /* Volatile bitfields should be accessed in the mode of the
6802 field's type, not the mode computed based on the bit
6803 size. */
6804 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6805 else if (!DECL_BIT_FIELD (field))
6806 mode = DECL_MODE (field);
6807 else if (DECL_MODE (field) == BLKmode)
6808 blkmode_bitfield = true;
6809
6810 *punsignedp = DECL_UNSIGNED (field);
6811 }
6812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6813 {
6814 size_tree = TREE_OPERAND (exp, 1);
6815 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6816 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6817
6818 /* For vector types, with the correct size of access, use the mode of the
6819 inner type. */
6820 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6821 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6822 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6823 mode = TYPE_MODE (TREE_TYPE (exp));
6824 }
6825 else
6826 {
6827 mode = TYPE_MODE (TREE_TYPE (exp));
6828 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6829
6830 if (mode == BLKmode)
6831 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6832 else
6833 *pbitsize = GET_MODE_BITSIZE (mode);
6834 }
6835
6836 if (size_tree != 0)
6837 {
6838 if (! tree_fits_uhwi_p (size_tree))
6839 mode = BLKmode, *pbitsize = -1;
6840 else
6841 *pbitsize = tree_to_uhwi (size_tree);
6842 }
6843
6844 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6845 and find the ultimate containing object. */
6846 while (1)
6847 {
6848 switch (TREE_CODE (exp))
6849 {
6850 case BIT_FIELD_REF:
6851 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6852 break;
6853
6854 case COMPONENT_REF:
6855 {
6856 tree field = TREE_OPERAND (exp, 1);
6857 tree this_offset = component_ref_field_offset (exp);
6858
6859 /* If this field hasn't been filled in yet, don't go past it.
6860 This should only happen when folding expressions made during
6861 type construction. */
6862 if (this_offset == 0)
6863 break;
6864
6865 offset = size_binop (PLUS_EXPR, offset, this_offset);
6866 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6867
6868 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6869 }
6870 break;
6871
6872 case ARRAY_REF:
6873 case ARRAY_RANGE_REF:
6874 {
6875 tree index = TREE_OPERAND (exp, 1);
6876 tree low_bound = array_ref_low_bound (exp);
6877 tree unit_size = array_ref_element_size (exp);
6878
6879 /* We assume all arrays have sizes that are a multiple of a byte.
6880 First subtract the lower bound, if any, in the type of the
6881 index, then convert to sizetype and multiply by the size of
6882 the array element. */
6883 if (! integer_zerop (low_bound))
6884 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6885 index, low_bound);
6886
6887 offset = size_binop (PLUS_EXPR, offset,
6888 size_binop (MULT_EXPR,
6889 fold_convert (sizetype, index),
6890 unit_size));
6891 }
6892 break;
6893
6894 case REALPART_EXPR:
6895 break;
6896
6897 case IMAGPART_EXPR:
6898 bit_offset += *pbitsize;
6899 break;
6900
6901 case VIEW_CONVERT_EXPR:
6902 if (keep_aligning && STRICT_ALIGNMENT
6903 && (TYPE_ALIGN (TREE_TYPE (exp))
6904 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6905 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6906 < BIGGEST_ALIGNMENT)
6907 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6908 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6909 goto done;
6910 break;
6911
6912 case MEM_REF:
6913 /* Hand back the decl for MEM[&decl, off]. */
6914 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6915 {
6916 tree off = TREE_OPERAND (exp, 1);
6917 if (!integer_zerop (off))
6918 {
6919 offset_int boff, coff = mem_ref_offset (exp);
6920 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6921 bit_offset += boff;
6922 }
6923 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6924 }
6925 goto done;
6926
6927 default:
6928 goto done;
6929 }
6930
6931 /* If any reference in the chain is volatile, the effect is volatile. */
6932 if (TREE_THIS_VOLATILE (exp))
6933 *pvolatilep = 1;
6934
6935 exp = TREE_OPERAND (exp, 0);
6936 }
6937 done:
6938
6939 /* If OFFSET is constant, see if we can return the whole thing as a
6940 constant bit position. Make sure to handle overflow during
6941 this conversion. */
6942 if (TREE_CODE (offset) == INTEGER_CST)
6943 {
6944 offset_int tem = wi::sext (wi::to_offset (offset),
6945 TYPE_PRECISION (sizetype));
6946 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6947 tem += bit_offset;
6948 if (wi::fits_shwi_p (tem))
6949 {
6950 *pbitpos = tem.to_shwi ();
6951 *poffset = offset = NULL_TREE;
6952 }
6953 }
6954
6955 /* Otherwise, split it up. */
6956 if (offset)
6957 {
6958 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6959 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6960 {
6961 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6962 offset_int tem = bit_offset.and_not (mask);
6963 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6964 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6965 bit_offset -= tem;
6966 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6967 offset = size_binop (PLUS_EXPR, offset,
6968 wide_int_to_tree (sizetype, tem));
6969 }
6970
6971 *pbitpos = bit_offset.to_shwi ();
6972 *poffset = offset;
6973 }
6974
6975 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6976 if (mode == VOIDmode
6977 && blkmode_bitfield
6978 && (*pbitpos % BITS_PER_UNIT) == 0
6979 && (*pbitsize % BITS_PER_UNIT) == 0)
6980 *pmode = BLKmode;
6981 else
6982 *pmode = mode;
6983
6984 return exp;
6985 }
6986
6987 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6988
6989 static unsigned HOST_WIDE_INT
6990 target_align (const_tree target)
6991 {
6992 /* We might have a chain of nested references with intermediate misaligning
6993 bit-field components, so we need to recurse to find out. */
6994
6995 unsigned HOST_WIDE_INT this_align, outer_align;
6996
6997 switch (TREE_CODE (target))
6998 {
6999 case BIT_FIELD_REF:
7000 return 1;
7001
7002 case COMPONENT_REF:
7003 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7004 outer_align = target_align (TREE_OPERAND (target, 0));
7005 return MIN (this_align, outer_align);
7006
7007 case ARRAY_REF:
7008 case ARRAY_RANGE_REF:
7009 this_align = TYPE_ALIGN (TREE_TYPE (target));
7010 outer_align = target_align (TREE_OPERAND (target, 0));
7011 return MIN (this_align, outer_align);
7012
7013 CASE_CONVERT:
7014 case NON_LVALUE_EXPR:
7015 case VIEW_CONVERT_EXPR:
7016 this_align = TYPE_ALIGN (TREE_TYPE (target));
7017 outer_align = target_align (TREE_OPERAND (target, 0));
7018 return MAX (this_align, outer_align);
7019
7020 default:
7021 return TYPE_ALIGN (TREE_TYPE (target));
7022 }
7023 }
7024
7025 \f
7026 /* Given an rtx VALUE that may contain additions and multiplications, return
7027 an equivalent value that just refers to a register, memory, or constant.
7028 This is done by generating instructions to perform the arithmetic and
7029 returning a pseudo-register containing the value.
7030
7031 The returned value may be a REG, SUBREG, MEM or constant. */
7032
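/* For instance (illustrative only), given
       (plus:SI (reg:SI 100) (const_int 12))
   the PLUS is expanded with expand_simple_binop and the register holding
   the sum is returned.  */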
7033 rtx
7034 force_operand (rtx value, rtx target)
7035 {
7036 rtx op1, op2;
7037 /* Use subtarget as the target for operand 0 of a binary operation. */
7038 rtx subtarget = get_subtarget (target);
7039 enum rtx_code code = GET_CODE (value);
7040
7041 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7042 if (code == SUBREG
7043 && !REG_P (SUBREG_REG (value))
7044 && !MEM_P (SUBREG_REG (value)))
7045 {
7046 value
7047 = simplify_gen_subreg (GET_MODE (value),
7048 force_reg (GET_MODE (SUBREG_REG (value)),
7049 force_operand (SUBREG_REG (value),
7050 NULL_RTX)),
7051 GET_MODE (SUBREG_REG (value)),
7052 SUBREG_BYTE (value));
7053 code = GET_CODE (value);
7054 }
7055
7056 /* Check for a PIC address load. */
7057 if ((code == PLUS || code == MINUS)
7058 && XEXP (value, 0) == pic_offset_table_rtx
7059 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7060 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7061 || GET_CODE (XEXP (value, 1)) == CONST))
7062 {
7063 if (!subtarget)
7064 subtarget = gen_reg_rtx (GET_MODE (value));
7065 emit_move_insn (subtarget, value);
7066 return subtarget;
7067 }
7068
7069 if (ARITHMETIC_P (value))
7070 {
7071 op2 = XEXP (value, 1);
7072 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7073 subtarget = 0;
7074 if (code == MINUS && CONST_INT_P (op2))
7075 {
7076 code = PLUS;
7077 op2 = negate_rtx (GET_MODE (value), op2);
7078 }
7079
7080 /* Check for an addition with OP2 a constant integer and our first
7081 operand a PLUS of a virtual register and something else. In that
7082 case, we want to emit the sum of the virtual register and the
7083 constant first and then add the other value. This allows virtual
7084 register instantiation to simply modify the constant rather than
7085 creating another one around this addition. */
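      /* Shape handled here (illustrative only):
	 (plus (plus (reg virtual-stack-vars) (reg n)) (const_int 8));
	 the virtual register and the const_int are summed first so that
	 instantiation can fold the 8 into a single constant.  */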
7086 if (code == PLUS && CONST_INT_P (op2)
7087 && GET_CODE (XEXP (value, 0)) == PLUS
7088 && REG_P (XEXP (XEXP (value, 0), 0))
7089 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7090 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7091 {
7092 rtx temp = expand_simple_binop (GET_MODE (value), code,
7093 XEXP (XEXP (value, 0), 0), op2,
7094 subtarget, 0, OPTAB_LIB_WIDEN);
7095 return expand_simple_binop (GET_MODE (value), code, temp,
7096 force_operand (XEXP (XEXP (value,
7097 0), 1), 0),
7098 target, 0, OPTAB_LIB_WIDEN);
7099 }
7100
7101 op1 = force_operand (XEXP (value, 0), subtarget);
7102 op2 = force_operand (op2, NULL_RTX);
7103 switch (code)
7104 {
7105 case MULT:
7106 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7107 case DIV:
7108 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7109 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7110 target, 1, OPTAB_LIB_WIDEN);
7111 else
7112 return expand_divmod (0,
7113 FLOAT_MODE_P (GET_MODE (value))
7114 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7115 GET_MODE (value), op1, op2, target, 0);
7116 case MOD:
7117 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7118 target, 0);
7119 case UDIV:
7120 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7121 target, 1);
7122 case UMOD:
7123 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7124 target, 1);
7125 case ASHIFTRT:
7126 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7127 target, 0, OPTAB_LIB_WIDEN);
7128 default:
7129 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7130 target, 1, OPTAB_LIB_WIDEN);
7131 }
7132 }
7133 if (UNARY_P (value))
7134 {
7135 if (!target)
7136 target = gen_reg_rtx (GET_MODE (value));
7137 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7138 switch (code)
7139 {
7140 case ZERO_EXTEND:
7141 case SIGN_EXTEND:
7142 case TRUNCATE:
7143 case FLOAT_EXTEND:
7144 case FLOAT_TRUNCATE:
7145 convert_move (target, op1, code == ZERO_EXTEND);
7146 return target;
7147
7148 case FIX:
7149 case UNSIGNED_FIX:
7150 expand_fix (target, op1, code == UNSIGNED_FIX);
7151 return target;
7152
7153 case FLOAT:
7154 case UNSIGNED_FLOAT:
7155 expand_float (target, op1, code == UNSIGNED_FLOAT);
7156 return target;
7157
7158 default:
7159 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7160 }
7161 }
7162
7163 #ifdef INSN_SCHEDULING
7164 /* On machines that have insn scheduling, we want all memory references to be
7165 explicit, so we need to deal with such paradoxical SUBREGs. */
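  /* E.g. (subreg:DI (mem:SI ...) 0) where DImode is wider than SImode:
     the MEM is loaded by an explicit insn first and the SUBREG is rebuilt
     around the resulting register.  */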
7166 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7167 value
7168 = simplify_gen_subreg (GET_MODE (value),
7169 force_reg (GET_MODE (SUBREG_REG (value)),
7170 force_operand (SUBREG_REG (value),
7171 NULL_RTX)),
7172 GET_MODE (SUBREG_REG (value)),
7173 SUBREG_BYTE (value));
7174 #endif
7175
7176 return value;
7177 }
7178 \f
7179 /* Subroutine of expand_expr: return nonzero iff there is no way that
7180 EXP can reference X, which is being modified. TOP_P is nonzero if this
7181 call is going to be used to determine whether we need a temporary
7182 for EXP, as opposed to a recursive call to this function.
7183
7184 It is always safe for this routine to return zero since it merely
7185 searches for optimization opportunities. */
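/* Usage note (an addition, not from the original sources): expand_operands
   below calls safe_from_p (target, exp1, 1) to decide whether TARGET can be
   used while expanding exp0 without being clobbered by the later expansion
   of exp1.  */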
7186
7187 int
7188 safe_from_p (const_rtx x, tree exp, int top_p)
7189 {
7190 rtx exp_rtl = 0;
7191 int i, nops;
7192
7193 if (x == 0
7194 /* If EXP has varying size, we MUST use a target since we currently
7195 have no way of allocating temporaries of variable size
7196 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7197 So we assume here that something at a higher level has prevented a
7198 clash. This is somewhat bogus, but the best we can do. Only
7199 do this when X is BLKmode and when we are at the top level. */
7200 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7201 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7202 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7203 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7204 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7205 != INTEGER_CST)
7206 && GET_MODE (x) == BLKmode)
7207 /* If X is in the outgoing argument area, it is always safe. */
7208 || (MEM_P (x)
7209 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7210 || (GET_CODE (XEXP (x, 0)) == PLUS
7211 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7212 return 1;
7213
7214 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7215 find the underlying pseudo. */
7216 if (GET_CODE (x) == SUBREG)
7217 {
7218 x = SUBREG_REG (x);
7219 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7220 return 0;
7221 }
7222
7223 /* Now look at our tree code and possibly recurse. */
7224 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7225 {
7226 case tcc_declaration:
7227 exp_rtl = DECL_RTL_IF_SET (exp);
7228 break;
7229
7230 case tcc_constant:
7231 return 1;
7232
7233 case tcc_exceptional:
7234 if (TREE_CODE (exp) == TREE_LIST)
7235 {
7236 while (1)
7237 {
7238 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7239 return 0;
7240 exp = TREE_CHAIN (exp);
7241 if (!exp)
7242 return 1;
7243 if (TREE_CODE (exp) != TREE_LIST)
7244 return safe_from_p (x, exp, 0);
7245 }
7246 }
7247 else if (TREE_CODE (exp) == CONSTRUCTOR)
7248 {
7249 constructor_elt *ce;
7250 unsigned HOST_WIDE_INT idx;
7251
7252 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7253 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7254 || !safe_from_p (x, ce->value, 0))
7255 return 0;
7256 return 1;
7257 }
7258 else if (TREE_CODE (exp) == ERROR_MARK)
7259 return 1; /* An already-visited SAVE_EXPR? */
7260 else
7261 return 0;
7262
7263 case tcc_statement:
7264 /* The only case we look at here is the DECL_INITIAL inside a
7265 DECL_EXPR. */
7266 return (TREE_CODE (exp) != DECL_EXPR
7267 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7268 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7269 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7270
7271 case tcc_binary:
7272 case tcc_comparison:
7273 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7274 return 0;
7275 /* Fall through. */
7276
7277 case tcc_unary:
7278 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7279
7280 case tcc_expression:
7281 case tcc_reference:
7282 case tcc_vl_exp:
7283 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7284 the expression. If it is set, we conflict iff we are that rtx or
7285 both are in memory. Otherwise, we check all operands of the
7286 expression recursively. */
7287
7288 switch (TREE_CODE (exp))
7289 {
7290 case ADDR_EXPR:
7291 /* If the operand is static or we are static, we can't conflict.
7292 Likewise if we don't conflict with the operand at all. */
7293 if (staticp (TREE_OPERAND (exp, 0))
7294 || TREE_STATIC (exp)
7295 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7296 return 1;
7297
7298 /* Otherwise, the only way this can conflict is if we are taking
7299 the address of a DECL whose address is part of X, which is
7300 very rare. */
7301 exp = TREE_OPERAND (exp, 0);
7302 if (DECL_P (exp))
7303 {
7304 if (!DECL_RTL_SET_P (exp)
7305 || !MEM_P (DECL_RTL (exp)))
7306 return 0;
7307 else
7308 exp_rtl = XEXP (DECL_RTL (exp), 0);
7309 }
7310 break;
7311
7312 case MEM_REF:
7313 if (MEM_P (x)
7314 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7315 get_alias_set (exp)))
7316 return 0;
7317 break;
7318
7319 case CALL_EXPR:
7320 /* Assume that the call will clobber all hard registers and
7321 all of memory. */
7322 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7323 || MEM_P (x))
7324 return 0;
7325 break;
7326
7327 case WITH_CLEANUP_EXPR:
7328 case CLEANUP_POINT_EXPR:
7329 /* Lowered by gimplify.c. */
7330 gcc_unreachable ();
7331
7332 case SAVE_EXPR:
7333 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7334
7335 default:
7336 break;
7337 }
7338
7339 /* If we have an rtx, we do not need to scan our operands. */
7340 if (exp_rtl)
7341 break;
7342
7343 nops = TREE_OPERAND_LENGTH (exp);
7344 for (i = 0; i < nops; i++)
7345 if (TREE_OPERAND (exp, i) != 0
7346 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7347 return 0;
7348
7349 break;
7350
7351 case tcc_type:
7352 /* Should never get a type here. */
7353 gcc_unreachable ();
7354 }
7355
7356 /* If we have an rtl, find any enclosed object. Then see if we conflict
7357 with it. */
7358 if (exp_rtl)
7359 {
7360 if (GET_CODE (exp_rtl) == SUBREG)
7361 {
7362 exp_rtl = SUBREG_REG (exp_rtl);
7363 if (REG_P (exp_rtl)
7364 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7365 return 0;
7366 }
7367
7368 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7369 are memory and they conflict. */
7370 return ! (rtx_equal_p (x, exp_rtl)
7371 || (MEM_P (x) && MEM_P (exp_rtl)
7372 && true_dependence (exp_rtl, VOIDmode, x)));
7373 }
7374
7375 /* If we reach here, it is safe. */
7376 return 1;
7377 }
7378
7379 \f
7380 /* Return the highest power of two that EXP is known to be a multiple of.
7381 This is used in updating alignment of MEMs in array references. */
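/* Worked example (illustrative): for EXP equal to the integer constant 24,
   tree_ctz reports 3 trailing zero bits, so the result is 1 << 3 = 8,
   unless BIGGEST_ALIGNMENT is smaller, in which case that is returned.  */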
7382
7383 unsigned HOST_WIDE_INT
7384 highest_pow2_factor (const_tree exp)
7385 {
7386 unsigned HOST_WIDE_INT ret;
7387 int trailing_zeros = tree_ctz (exp);
7388 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7389 return BIGGEST_ALIGNMENT;
7390 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7391 if (ret > BIGGEST_ALIGNMENT)
7392 return BIGGEST_ALIGNMENT;
7393 return ret;
7394 }
7395
7396 /* Similar, except that the alignment requirements of TARGET are
7397 taken into account. Assume it is at least as aligned as its
7398 type, unless it is a COMPONENT_REF in which case the layout of
7399 the structure gives the alignment. */
7400
7401 static unsigned HOST_WIDE_INT
7402 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7403 {
7404 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7405 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7406
7407 return MAX (factor, talign);
7408 }
7409 \f
7410 /* Convert the tree comparison code TCODE to the rtl one where the
7411 signedness is UNSIGNEDP. */
7412
7413 static enum rtx_code
7414 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7415 {
7416 enum rtx_code code;
7417 switch (tcode)
7418 {
7419 case EQ_EXPR:
7420 code = EQ;
7421 break;
7422 case NE_EXPR:
7423 code = NE;
7424 break;
7425 case LT_EXPR:
7426 code = unsignedp ? LTU : LT;
7427 break;
7428 case LE_EXPR:
7429 code = unsignedp ? LEU : LE;
7430 break;
7431 case GT_EXPR:
7432 code = unsignedp ? GTU : GT;
7433 break;
7434 case GE_EXPR:
7435 code = unsignedp ? GEU : GE;
7436 break;
7437 case UNORDERED_EXPR:
7438 code = UNORDERED;
7439 break;
7440 case ORDERED_EXPR:
7441 code = ORDERED;
7442 break;
7443 case UNLT_EXPR:
7444 code = UNLT;
7445 break;
7446 case UNLE_EXPR:
7447 code = UNLE;
7448 break;
7449 case UNGT_EXPR:
7450 code = UNGT;
7451 break;
7452 case UNGE_EXPR:
7453 code = UNGE;
7454 break;
7455 case UNEQ_EXPR:
7456 code = UNEQ;
7457 break;
7458 case LTGT_EXPR:
7459 code = LTGT;
7460 break;
7461
7462 default:
7463 gcc_unreachable ();
7464 }
7465 return code;
7466 }
7467
7468 /* Subroutine of expand_expr. Expand the two operands of a binary
7469 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7470 The value may be stored in TARGET if TARGET is nonzero. The
7471 MODIFIER argument is as documented by expand_expr. */
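/* Usage sketch (an addition): a typical call from the binary-operator cases
   further down is
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
   note that TARGET is dropped below whenever it is not safe_from_p the
   second operand.  */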
7472
7473 void
7474 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7475 enum expand_modifier modifier)
7476 {
7477 if (! safe_from_p (target, exp1, 1))
7478 target = 0;
7479 if (operand_equal_p (exp0, exp1, 0))
7480 {
7481 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7482 *op1 = copy_rtx (*op0);
7483 }
7484 else
7485 {
7486 /* If we need to preserve evaluation order, copy exp0 into its own
7487 temporary variable so that it can't be clobbered by exp1. */
7488 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7489 exp0 = save_expr (exp0);
7490 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7491 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7492 }
7493 }
7494
7495 \f
7496 /* Return a MEM that contains constant EXP. DEFER is as for
7497 output_constant_def and MODIFIER is as for expand_expr. */
7498
7499 static rtx
7500 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7501 {
7502 rtx mem;
7503
7504 mem = output_constant_def (exp, defer);
7505 if (modifier != EXPAND_INITIALIZER)
7506 mem = use_anchored_address (mem);
7507 return mem;
7508 }
7509
7510 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7511 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7512
7513 static rtx
7514 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7515 enum expand_modifier modifier, addr_space_t as)
7516 {
7517 rtx result, subtarget;
7518 tree inner, offset;
7519 HOST_WIDE_INT bitsize, bitpos;
7520 int volatilep, unsignedp;
7521 machine_mode mode1;
7522
7523 /* If we are taking the address of a constant and are at the top level,
7524 we have to use output_constant_def since we can't call force_const_mem
7525 at top level. */
7526 /* ??? This should be considered a front-end bug. We should not be
7527 generating ADDR_EXPR of something that isn't an LVALUE. The only
7528 exception here is STRING_CST. */
7529 if (CONSTANT_CLASS_P (exp))
7530 {
7531 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7532 if (modifier < EXPAND_SUM)
7533 result = force_operand (result, target);
7534 return result;
7535 }
7536
7537 /* Everything must be something allowed by is_gimple_addressable. */
7538 switch (TREE_CODE (exp))
7539 {
7540 case INDIRECT_REF:
7541 /* This case will happen via recursion for &a->b. */
7542 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7543
7544 case MEM_REF:
7545 {
7546 tree tem = TREE_OPERAND (exp, 0);
7547 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7548 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7549 return expand_expr (tem, target, tmode, modifier);
7550 }
7551
7552 case CONST_DECL:
7553 /* Expand the initializer like constants above. */
7554 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7555 0, modifier), 0);
7556 if (modifier < EXPAND_SUM)
7557 result = force_operand (result, target);
7558 return result;
7559
7560 case REALPART_EXPR:
7561 /* The real part of the complex number is always first, therefore
7562 the address is the same as the address of the parent object. */
7563 offset = 0;
7564 bitpos = 0;
7565 inner = TREE_OPERAND (exp, 0);
7566 break;
7567
7568 case IMAGPART_EXPR:
7569 /* The imaginary part of the complex number is always second.
7570 The expression is therefore always offset by the size of the
7571 scalar type. */
7572 offset = 0;
7573 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7574 inner = TREE_OPERAND (exp, 0);
7575 break;
7576
7577 case COMPOUND_LITERAL_EXPR:
7578 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7579 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7580 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7581 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7582 the initializers aren't gimplified. */
7583 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7584 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7585 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7586 target, tmode, modifier, as);
7587 /* FALLTHRU */
7588 default:
7589 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7590 expand_expr, as that can have various side effects; LABEL_DECLs, for
7591 example, may not have their DECL_RTL set yet. Expand the rtl of
7592 CONSTRUCTORs too, which should yield a memory reference for the
7593 constructor's contents. Assume language specific tree nodes can
7594 be expanded in some interesting way. */
7595 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7596 if (DECL_P (exp)
7597 || TREE_CODE (exp) == CONSTRUCTOR
7598 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7599 {
7600 result = expand_expr (exp, target, tmode,
7601 modifier == EXPAND_INITIALIZER
7602 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7603
7604 /* If the DECL isn't in memory, then the DECL wasn't properly
7605 marked TREE_ADDRESSABLE, which will be either a front-end
7606 or a tree optimizer bug. */
7607
7608 if (TREE_ADDRESSABLE (exp)
7609 && ! MEM_P (result)
7610 && ! targetm.calls.allocate_stack_slots_for_args ())
7611 {
7612 error ("local frame unavailable (naked function?)");
7613 return result;
7614 }
7615 else
7616 gcc_assert (MEM_P (result));
7617 result = XEXP (result, 0);
7618
7619 /* ??? Is this needed anymore? */
7620 if (DECL_P (exp))
7621 TREE_USED (exp) = 1;
7622
7623 if (modifier != EXPAND_INITIALIZER
7624 && modifier != EXPAND_CONST_ADDRESS
7625 && modifier != EXPAND_SUM)
7626 result = force_operand (result, target);
7627 return result;
7628 }
7629
7630 /* Pass FALSE as the last argument to get_inner_reference although
7631 we are expanding to RTL. The rationale is that we know how to
7632 handle "aligning nodes" here: we can just bypass them because
7633 they won't change the final object whose address will be returned
7634 (they actually exist only for that purpose). */
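      /* For instance (illustrative, assuming 8-bit units): for s.f with
	 field F at byte 4, get_inner_reference returns S as INNER with
	 BITPOS 32 and a null OFFSET, and the code further down adds the
	 4 bytes to the address of S.  */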
7635 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7636 &mode1, &unsignedp, &volatilep, false);
7637 break;
7638 }
7639
7640 /* We must have made progress. */
7641 gcc_assert (inner != exp);
7642
7643 subtarget = offset || bitpos ? NULL_RTX : target;
7644 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7645 the inner alignment, force the inner to be sufficiently aligned. */
7646 if (CONSTANT_CLASS_P (inner)
7647 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7648 {
7649 inner = copy_node (inner);
7650 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7651 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7652 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7653 }
7654 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7655
7656 if (offset)
7657 {
7658 rtx tmp;
7659
7660 if (modifier != EXPAND_NORMAL)
7661 result = force_operand (result, NULL);
7662 tmp = expand_expr (offset, NULL_RTX, tmode,
7663 modifier == EXPAND_INITIALIZER
7664 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7665
7666 /* expand_expr is allowed to return an object in a mode other
7667 than TMODE. If it did, we need to convert. */
7668 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7669 tmp = convert_modes (tmode, GET_MODE (tmp),
7670 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7671 result = convert_memory_address_addr_space (tmode, result, as);
7672 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7673
7674 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7675 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7676 else
7677 {
7678 subtarget = bitpos ? NULL_RTX : target;
7679 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7680 1, OPTAB_LIB_WIDEN);
7681 }
7682 }
7683
7684 if (bitpos)
7685 {
7686 /* Someone beforehand should have rejected taking the address
7687 of such an object. */
7688 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7689
7690 result = convert_memory_address_addr_space (tmode, result, as);
7691 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7692 if (modifier < EXPAND_SUM)
7693 result = force_operand (result, target);
7694 }
7695
7696 return result;
7697 }
7698
7699 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7700 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7701
7702 static rtx
7703 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7704 enum expand_modifier modifier)
7705 {
7706 addr_space_t as = ADDR_SPACE_GENERIC;
7707 machine_mode address_mode = Pmode;
7708 machine_mode pointer_mode = ptr_mode;
7709 machine_mode rmode;
7710 rtx result;
7711
7712 /* Target mode of VOIDmode says "whatever's natural". */
7713 if (tmode == VOIDmode)
7714 tmode = TYPE_MODE (TREE_TYPE (exp));
7715
7716 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7717 {
7718 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7719 address_mode = targetm.addr_space.address_mode (as);
7720 pointer_mode = targetm.addr_space.pointer_mode (as);
7721 }
7722
7723 /* We can get called with some Weird Things if the user does silliness
7724 like "(short) &a". In that case, convert_memory_address won't do
7725 the right thing, so ignore the given target mode. */
7726 if (tmode != address_mode && tmode != pointer_mode)
7727 tmode = address_mode;
7728
7729 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7730 tmode, modifier, as);
7731
7732 /* Despite expand_expr's claims about ignoring TMODE when it is not
7733 strictly convenient, things break if we don't honor it. Note
7734 that combined with the above, we only do this for pointer modes. */
7735 rmode = GET_MODE (result);
7736 if (rmode == VOIDmode)
7737 rmode = tmode;
7738 if (rmode != tmode)
7739 result = convert_memory_address_addr_space (tmode, result, as);
7740
7741 return result;
7742 }
7743
7744 /* Generate code for computing CONSTRUCTOR EXP.
7745 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7746 is TRUE, instead of creating a temporary variable in memory
7747 NULL is returned and the caller needs to handle it differently. */
7748
7749 static rtx
7750 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7751 bool avoid_temp_mem)
7752 {
7753 tree type = TREE_TYPE (exp);
7754 machine_mode mode = TYPE_MODE (type);
7755
7756 /* Try to avoid creating a temporary at all. This is possible
7757 if all of the initializer is zero.
7758 FIXME: try to handle all [0..255] initializers we can handle
7759 with memset. */
7760 if (TREE_STATIC (exp)
7761 && !TREE_ADDRESSABLE (exp)
7762 && target != 0 && mode == BLKmode
7763 && all_zeros_p (exp))
7764 {
7765 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7766 return target;
7767 }
7768
7769 /* All elts simple constants => refer to a constant in memory. But
7770 if this is a non-BLKmode mode, let it store a field at a time
7771 since that should make a CONST_INT, CONST_WIDE_INT or
7772 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7773 use, it is best to store directly into the target unless the type
7774 is large enough that memcpy will be used. If we are making an
7775 initializer and all operands are constant, put it in memory as
7776 well.
7777
7778 FIXME: Avoid trying to fill vector constructors piece-meal.
7779 Output them with output_constant_def below unless we're sure
7780 they're zeros. This should go away when vector initializers
7781 are treated like VECTOR_CST instead of arrays. */
7782 if ((TREE_STATIC (exp)
7783 && ((mode == BLKmode
7784 && ! (target != 0 && safe_from_p (target, exp, 1)))
7785 || TREE_ADDRESSABLE (exp)
7786 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7787 && (! can_move_by_pieces
7788 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7789 TYPE_ALIGN (type)))
7790 && ! mostly_zeros_p (exp))))
7791 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7792 && TREE_CONSTANT (exp)))
7793 {
7794 rtx constructor;
7795
7796 if (avoid_temp_mem)
7797 return NULL_RTX;
7798
7799 constructor = expand_expr_constant (exp, 1, modifier);
7800
7801 if (modifier != EXPAND_CONST_ADDRESS
7802 && modifier != EXPAND_INITIALIZER
7803 && modifier != EXPAND_SUM)
7804 constructor = validize_mem (constructor);
7805
7806 return constructor;
7807 }
7808
7809 /* Handle calls that pass values in multiple non-contiguous
7810 locations. The Irix 6 ABI has examples of this. */
7811 if (target == 0 || ! safe_from_p (target, exp, 1)
7812 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7813 {
7814 if (avoid_temp_mem)
7815 return NULL_RTX;
7816
7817 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7818 }
7819
7820 store_constructor (exp, target, 0, int_expr_size (exp));
7821 return target;
7822 }
7823
7824
7825 /* expand_expr: generate code for computing expression EXP.
7826 An rtx for the computed value is returned. The value is never null.
7827 In the case of a void EXP, const0_rtx is returned.
7828
7829 The value may be stored in TARGET if TARGET is nonzero.
7830 TARGET is just a suggestion; callers must assume that
7831 the rtx returned may not be the same as TARGET.
7832
7833 If TARGET is CONST0_RTX, it means that the value will be ignored.
7834
7835 If TMODE is not VOIDmode, it suggests generating the
7836 result in mode TMODE. But this is done only when convenient.
7837 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7838 TMODE is just a suggestion; callers must assume that
7839 the rtx returned may not have mode TMODE.
7840
7841 Note that TARGET may have neither TMODE nor MODE. In that case, it
7842 probably will not be used.
7843
7844 If MODIFIER is EXPAND_SUM then when EXP is an addition
7845 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7846 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7847 products as above, or REG or MEM, or constant.
7848 Ordinarily in such cases we would output mul or add instructions
7849 and then return a pseudo reg containing the sum.
7850
7851 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7852 it also marks a label as absolutely required (it can't be dead).
7853 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7854 This is used for outputting expressions used in initializers.
7855
7856 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7857 with a constant address even if that address is not normally legitimate.
7858 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7859
7860 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7861 a call parameter. Such targets require special care as we haven't yet
7862 marked TARGET so that it's safe from being trashed by libcalls. We
7863 don't want to use TARGET for anything but the final result;
7864 intermediate values must go elsewhere. Additionally, calls to
7865 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7866
7867 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7868 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7869 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7870 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7871 recursively.
7872
7873 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7874 In this case, we don't adjust a returned MEM rtx that wouldn't be
7875 sufficiently aligned for its mode; instead, it's up to the caller
7876 to deal with it afterwards. This is used to make sure that unaligned
7877 base objects for which out-of-bounds accesses are supported, for
7878 example record types with trailing arrays, aren't realigned behind
7879 the back of the caller.
7880 The normal operating mode is to pass FALSE for this parameter. */
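/* Illustration of EXPAND_SUM (an addition to the comment above): an address
   computation may be returned as a bare sum such as
   (plus (reg) (symbol_ref "arr")) instead of being forced into a pseudo
   register, so callers that are building addresses can keep folding
   constants into it.  */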
7881
7882 rtx
7883 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7884 enum expand_modifier modifier, rtx *alt_rtl,
7885 bool inner_reference_p)
7886 {
7887 rtx ret;
7888
7889 /* Handle ERROR_MARK before anybody tries to access its type. */
7890 if (TREE_CODE (exp) == ERROR_MARK
7891 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7892 {
7893 ret = CONST0_RTX (tmode);
7894 return ret ? ret : const0_rtx;
7895 }
7896
7897 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7898 inner_reference_p);
7899 return ret;
7900 }
7901
7902 /* Try to expand the conditional expression which is represented by
7903 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7904 return the RTL register which represents the result. Otherwise return
7905 NULL_RTX. */
7906
7907 static rtx
7908 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7909 tree treeop1 ATTRIBUTE_UNUSED,
7910 tree treeop2 ATTRIBUTE_UNUSED)
7911 {
7912 rtx insn;
7913 rtx op00, op01, op1, op2;
7914 enum rtx_code comparison_code;
7915 machine_mode comparison_mode;
7916 gimple srcstmt;
7917 rtx temp;
7918 tree type = TREE_TYPE (treeop1);
7919 int unsignedp = TYPE_UNSIGNED (type);
7920 machine_mode mode = TYPE_MODE (type);
7921 machine_mode orig_mode = mode;
7922
7923 /* If we cannot do a conditional move on the mode, try doing it
7924 with the promoted mode. */
7925 if (!can_conditionally_move_p (mode))
7926 {
7927 mode = promote_mode (type, mode, &unsignedp);
7928 if (!can_conditionally_move_p (mode))
7929 return NULL_RTX;
7930 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7931 }
7932 else
7933 temp = assign_temp (type, 0, 1);
7934
7935 start_sequence ();
7936 expand_operands (treeop1, treeop2,
7937 temp, &op1, &op2, EXPAND_NORMAL);
7938
7939 if (TREE_CODE (treeop0) == SSA_NAME
7940 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7941 {
7942 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7943 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7944 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7945 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7946 comparison_mode = TYPE_MODE (type);
7947 unsignedp = TYPE_UNSIGNED (type);
7948 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7949 }
7950 else if (COMPARISON_CLASS_P (treeop0))
7951 {
7952 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7953 enum tree_code cmpcode = TREE_CODE (treeop0);
7954 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7955 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7956 unsignedp = TYPE_UNSIGNED (type);
7957 comparison_mode = TYPE_MODE (type);
7958 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7959 }
7960 else
7961 {
7962 op00 = expand_normal (treeop0);
7963 op01 = const0_rtx;
7964 comparison_code = NE;
7965 comparison_mode = GET_MODE (op00);
7966 if (comparison_mode == VOIDmode)
7967 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7968 }
7969
7970 if (GET_MODE (op1) != mode)
7971 op1 = gen_lowpart (mode, op1);
7972
7973 if (GET_MODE (op2) != mode)
7974 op2 = gen_lowpart (mode, op2);
7975
7976 /* Try to emit the conditional move. */
7977 insn = emit_conditional_move (temp, comparison_code,
7978 op00, op01, comparison_mode,
7979 op1, op2, mode,
7980 unsignedp);
7981
7982 /* If we could do the conditional move, emit the sequence,
7983 and return. */
7984 if (insn)
7985 {
7986 rtx_insn *seq = get_insns ();
7987 end_sequence ();
7988 emit_insn (seq);
7989 return convert_modes (orig_mode, mode, temp, 0);
7990 }
7991
7992 /* Otherwise discard the sequence and fall back to code with
7993 branches. */
7994 end_sequence ();
7995 return NULL_RTX;
7996 }
7997
7998 rtx
7999 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8000 enum expand_modifier modifier)
8001 {
8002 rtx op0, op1, op2, temp;
8003 rtx_code_label *lab;
8004 tree type;
8005 int unsignedp;
8006 machine_mode mode;
8007 enum tree_code code = ops->code;
8008 optab this_optab;
8009 rtx subtarget, original_target;
8010 int ignore;
8011 bool reduce_bit_field;
8012 location_t loc = ops->location;
8013 tree treeop0, treeop1, treeop2;
8014 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8015 ? reduce_to_bit_field_precision ((expr), \
8016 target, \
8017 type) \
8018 : (expr))
8019
8020 type = ops->type;
8021 mode = TYPE_MODE (type);
8022 unsignedp = TYPE_UNSIGNED (type);
8023
8024 treeop0 = ops->op0;
8025 treeop1 = ops->op1;
8026 treeop2 = ops->op2;
8027
8028 /* We should be called only on simple (binary or unary) expressions,
8029 exactly those that are valid in gimple expressions that aren't
8030 GIMPLE_SINGLE_RHS (or invalid). */
8031 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8032 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8033 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8034
8035 ignore = (target == const0_rtx
8036 || ((CONVERT_EXPR_CODE_P (code)
8037 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8038 && TREE_CODE (type) == VOID_TYPE));
8039
8040 /* We should be called only if we need the result. */
8041 gcc_assert (!ignore);
8042
8043 /* An operation in what may be a bit-field type needs the
8044 result to be reduced to the precision of the bit-field type,
8045 which is narrower than that of the type's mode. */
8046 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8047 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
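  /* Example (illustrative): for a 3-bit bit-field type carried in a wider
     integer mode, TYPE_PRECISION is 3 while the mode precision is larger,
     so results wrapped in REDUCE_BIT_FIELD below are narrowed back to
     3 bits of precision.  */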
8048
8049 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8050 target = 0;
8051
8052 /* Use subtarget as the target for operand 0 of a binary operation. */
8053 subtarget = get_subtarget (target);
8054 original_target = target;
8055
8056 switch (code)
8057 {
8058 case NON_LVALUE_EXPR:
8059 case PAREN_EXPR:
8060 CASE_CONVERT:
8061 if (treeop0 == error_mark_node)
8062 return const0_rtx;
8063
8064 if (TREE_CODE (type) == UNION_TYPE)
8065 {
8066 tree valtype = TREE_TYPE (treeop0);
8067
8068 /* If both input and output are BLKmode, this conversion isn't doing
8069 anything except possibly changing memory attributes. */
8070 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8071 {
8072 rtx result = expand_expr (treeop0, target, tmode,
8073 modifier);
8074
8075 result = copy_rtx (result);
8076 set_mem_attributes (result, type, 0);
8077 return result;
8078 }
8079
8080 if (target == 0)
8081 {
8082 if (TYPE_MODE (type) != BLKmode)
8083 target = gen_reg_rtx (TYPE_MODE (type));
8084 else
8085 target = assign_temp (type, 1, 1);
8086 }
8087
8088 if (MEM_P (target))
8089 /* Store data into beginning of memory target. */
8090 store_expr (treeop0,
8091 adjust_address (target, TYPE_MODE (valtype), 0),
8092 modifier == EXPAND_STACK_PARM,
8093 false);
8094
8095 else
8096 {
8097 gcc_assert (REG_P (target));
8098
8099 /* Store this field into a union of the proper type. */
8100 store_field (target,
8101 MIN ((int_size_in_bytes (TREE_TYPE
8102 (treeop0))
8103 * BITS_PER_UNIT),
8104 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8105 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8106 }
8107
8108 /* Return the entire union. */
8109 return target;
8110 }
8111
8112 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8113 {
8114 op0 = expand_expr (treeop0, target, VOIDmode,
8115 modifier);
8116
8117 /* If the signedness of the conversion differs and OP0 is
8118 a promoted SUBREG, clear that indication since we now
8119 have to do the proper extension. */
8120 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8121 && GET_CODE (op0) == SUBREG)
8122 SUBREG_PROMOTED_VAR_P (op0) = 0;
8123
8124 return REDUCE_BIT_FIELD (op0);
8125 }
8126
8127 op0 = expand_expr (treeop0, NULL_RTX, mode,
8128 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8129 if (GET_MODE (op0) == mode)
8130 ;
8131
8132 /* If OP0 is a constant, just convert it into the proper mode. */
8133 else if (CONSTANT_P (op0))
8134 {
8135 tree inner_type = TREE_TYPE (treeop0);
8136 machine_mode inner_mode = GET_MODE (op0);
8137
8138 if (inner_mode == VOIDmode)
8139 inner_mode = TYPE_MODE (inner_type);
8140
8141 if (modifier == EXPAND_INITIALIZER)
8142 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8143 subreg_lowpart_offset (mode,
8144 inner_mode));
8145 else
8146 op0 = convert_modes (mode, inner_mode, op0,
8147 TYPE_UNSIGNED (inner_type));
8148 }
8149
8150 else if (modifier == EXPAND_INITIALIZER)
8151 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8152
8153 else if (target == 0)
8154 op0 = convert_to_mode (mode, op0,
8155 TYPE_UNSIGNED (TREE_TYPE
8156 (treeop0)));
8157 else
8158 {
8159 convert_move (target, op0,
8160 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8161 op0 = target;
8162 }
8163
8164 return REDUCE_BIT_FIELD (op0);
8165
8166 case ADDR_SPACE_CONVERT_EXPR:
8167 {
8168 tree treeop0_type = TREE_TYPE (treeop0);
8169 addr_space_t as_to;
8170 addr_space_t as_from;
8171
8172 gcc_assert (POINTER_TYPE_P (type));
8173 gcc_assert (POINTER_TYPE_P (treeop0_type));
8174
8175 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8176 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8177
8178 /* Conversions between pointers to the same address space should
8179 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8180 gcc_assert (as_to != as_from);
8181
8182 /* Ask target code to handle conversion between pointers
8183 to overlapping address spaces. */
8184 if (targetm.addr_space.subset_p (as_to, as_from)
8185 || targetm.addr_space.subset_p (as_from, as_to))
8186 {
8187 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8188 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8189 gcc_assert (op0);
8190 return op0;
8191 }
8192
8193 /* For disjoint address spaces, converting anything but
8194 a null pointer invokes undefined behaviour. We simply
8195 always return a null pointer here. */
8196 return CONST0_RTX (mode);
8197 }
8198
8199 case POINTER_PLUS_EXPR:
8200 /* Even though the sizetype mode and the pointer's mode can be different,
8201 expand is able to handle this correctly and get the correct result out
8202 of the PLUS_EXPR code. */
8203 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8204 if sizetype precision is smaller than pointer precision. */
8205 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8206 treeop1 = fold_convert_loc (loc, type,
8207 fold_convert_loc (loc, ssizetype,
8208 treeop1));
8209 /* If sizetype precision is larger than pointer precision, truncate the
8210 offset to have matching modes. */
8211 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8212 treeop1 = fold_convert_loc (loc, type, treeop1);
8213
8214 case PLUS_EXPR:
8215 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8216 something else, make sure we add the register to the constant and
8217 then to the other thing. This case can occur during strength
8218 reduction and doing it this way will produce better code if the
8219 frame pointer or argument pointer is eliminated.
8220
8221 fold-const.c will ensure that the constant is always in the inner
8222 PLUS_EXPR, so the only case we need to do anything about is if
8223 sp, ap, or fp is our second argument, in which case we must swap
8224 the innermost first argument and our second argument. */
8225
8226 if (TREE_CODE (treeop0) == PLUS_EXPR
8227 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8228 && TREE_CODE (treeop1) == VAR_DECL
8229 && (DECL_RTL (treeop1) == frame_pointer_rtx
8230 || DECL_RTL (treeop1) == stack_pointer_rtx
8231 || DECL_RTL (treeop1) == arg_pointer_rtx))
8232 {
8233 gcc_unreachable ();
8234 }
8235
8236 /* If the result is to be ptr_mode and we are adding an integer to
8237 something, we might be forming a constant. So try to use
8238 plus_constant. If it produces a sum and we can't accept it,
8239 use force_operand. This allows P = &ARR[const] to generate
8240 efficient code on machines where a SYMBOL_REF is not a valid
8241 address.
8242
8243 If this is an EXPAND_SUM call, always return the sum. */
8244 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8245 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8246 {
8247 if (modifier == EXPAND_STACK_PARM)
8248 target = 0;
8249 if (TREE_CODE (treeop0) == INTEGER_CST
8250 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8251 && TREE_CONSTANT (treeop1))
8252 {
8253 rtx constant_part;
8254 HOST_WIDE_INT wc;
8255 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8256
8257 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8258 EXPAND_SUM);
8259 /* Use wi::shwi to ensure that the constant is
8260 truncated according to the mode of OP1, then sign extended
8261 to a HOST_WIDE_INT. Using the constant directly can result
8262 in non-canonical RTL in a 64x32 cross compile. */
8263 wc = TREE_INT_CST_LOW (treeop0);
8264 constant_part =
8265 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8266 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8267 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8268 op1 = force_operand (op1, target);
8269 return REDUCE_BIT_FIELD (op1);
8270 }
8271
8272 else if (TREE_CODE (treeop1) == INTEGER_CST
8273 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8274 && TREE_CONSTANT (treeop0))
8275 {
8276 rtx constant_part;
8277 HOST_WIDE_INT wc;
8278 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8279
8280 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8281 (modifier == EXPAND_INITIALIZER
8282 ? EXPAND_INITIALIZER : EXPAND_SUM));
8283 if (! CONSTANT_P (op0))
8284 {
8285 op1 = expand_expr (treeop1, NULL_RTX,
8286 VOIDmode, modifier);
8287 /* Return a PLUS if modifier says it's OK. */
8288 if (modifier == EXPAND_SUM
8289 || modifier == EXPAND_INITIALIZER)
8290 return simplify_gen_binary (PLUS, mode, op0, op1);
8291 goto binop2;
8292 }
8293 /* Use wi::shwi to ensure that the constant is
8294 truncated according to the mode of OP1, then sign extended
8295 to a HOST_WIDE_INT. Using the constant directly can result
8296 in non-canonical RTL in a 64x32 cross compile. */
8297 wc = TREE_INT_CST_LOW (treeop1);
8298 constant_part
8299 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8300 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8301 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8302 op0 = force_operand (op0, target);
8303 return REDUCE_BIT_FIELD (op0);
8304 }
8305 }
8306
8307 /* Use TER to expand pointer addition of a negated value
8308 as pointer subtraction. */
8309 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8310 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8311 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8312 && TREE_CODE (treeop1) == SSA_NAME
8313 && TYPE_MODE (TREE_TYPE (treeop0))
8314 == TYPE_MODE (TREE_TYPE (treeop1)))
8315 {
8316 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8317 if (def)
8318 {
8319 treeop1 = gimple_assign_rhs1 (def);
8320 code = MINUS_EXPR;
8321 goto do_minus;
8322 }
8323 }
8324
8325 /* No sense saving up arithmetic to be done
8326 if it's all in the wrong mode to form part of an address.
8327 And force_operand won't know whether to sign-extend or
8328 zero-extend. */
8329 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 || mode != ptr_mode)
8331 {
8332 expand_operands (treeop0, treeop1,
8333 subtarget, &op0, &op1, EXPAND_NORMAL);
8334 if (op0 == const0_rtx)
8335 return op1;
8336 if (op1 == const0_rtx)
8337 return op0;
8338 goto binop2;
8339 }
8340
8341 expand_operands (treeop0, treeop1,
8342 subtarget, &op0, &op1, modifier);
8343 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8344
8345 case MINUS_EXPR:
8346 do_minus:
8347 /* For initializers, we are allowed to return a MINUS of two
8348 symbolic constants. Here we handle all cases when both operands
8349 are constant. */
8350 /* Handle difference of two symbolic constants,
8351 for the sake of an initializer. */
8352 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8353 && really_constant_p (treeop0)
8354 && really_constant_p (treeop1))
8355 {
8356 expand_operands (treeop0, treeop1,
8357 NULL_RTX, &op0, &op1, modifier);
8358
8359 /* If the last operand is a CONST_INT, use plus_constant of
8360 the negated constant. Else make the MINUS. */
8361 if (CONST_INT_P (op1))
8362 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8363 -INTVAL (op1)));
8364 else
8365 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8366 }
8367
8368 /* No sense saving up arithmetic to be done
8369 if it's all in the wrong mode to form part of an address.
8370 And force_operand won't know whether to sign-extend or
8371 zero-extend. */
8372 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8373 || mode != ptr_mode)
8374 goto binop;
8375
8376 expand_operands (treeop0, treeop1,
8377 subtarget, &op0, &op1, modifier);
8378
8379 /* Convert A - const to A + (-const). */
8380 if (CONST_INT_P (op1))
8381 {
8382 op1 = negate_rtx (mode, op1);
8383 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8384 }
8385
8386 goto binop2;
8387
8388 case WIDEN_MULT_PLUS_EXPR:
8389 case WIDEN_MULT_MINUS_EXPR:
8390 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8391 op2 = expand_normal (treeop2);
8392 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8393 target, unsignedp);
8394 return target;
8395
8396 case WIDEN_MULT_EXPR:
8397 /* If first operand is constant, swap them.
8398 Thus the following special case checks need only
8399 check the second operand. */
8400 if (TREE_CODE (treeop0) == INTEGER_CST)
8401 std::swap (treeop0, treeop1);
8402
8403 /* First, check if we have a multiplication of one signed and one
8404 unsigned operand. */
8405 if (TREE_CODE (treeop1) != INTEGER_CST
8406 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8407 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8408 {
8409 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8410 this_optab = usmul_widen_optab;
8411 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8412 != CODE_FOR_nothing)
8413 {
8414 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8415 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8416 EXPAND_NORMAL);
8417 else
8418 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8419 EXPAND_NORMAL);
8420 /* op0 and op1 might still be constant, despite the above
8421 != INTEGER_CST check. Handle it. */
8422 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8423 {
8424 op0 = convert_modes (innermode, mode, op0, true);
8425 op1 = convert_modes (innermode, mode, op1, false);
8426 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8427 target, unsignedp));
8428 }
8429 goto binop3;
8430 }
8431 }
8432 /* Check for a multiplication with matching signedness. */
8433 else if ((TREE_CODE (treeop1) == INTEGER_CST
8434 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8435 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8436 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8437 {
8438 tree op0type = TREE_TYPE (treeop0);
8439 machine_mode innermode = TYPE_MODE (op0type);
8440 bool zextend_p = TYPE_UNSIGNED (op0type);
8441 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8442 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8443
8444 if (TREE_CODE (treeop0) != INTEGER_CST)
8445 {
8446 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8447 != CODE_FOR_nothing)
8448 {
8449 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8450 EXPAND_NORMAL);
8451 /* op0 and op1 might still be constant, despite the above
8452 != INTEGER_CST check. Handle it. */
8453 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8454 {
8455 widen_mult_const:
8456 op0 = convert_modes (innermode, mode, op0, zextend_p);
8457 op1
8458 = convert_modes (innermode, mode, op1,
8459 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8460 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8461 target,
8462 unsignedp));
8463 }
8464 temp = expand_widening_mult (mode, op0, op1, target,
8465 unsignedp, this_optab);
8466 return REDUCE_BIT_FIELD (temp);
8467 }
8468 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8469 != CODE_FOR_nothing
8470 && innermode == word_mode)
8471 {
8472 rtx htem, hipart;
8473 op0 = expand_normal (treeop0);
8474 if (TREE_CODE (treeop1) == INTEGER_CST)
8475 op1 = convert_modes (innermode, mode,
8476 expand_normal (treeop1),
8477 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8478 else
8479 op1 = expand_normal (treeop1);
8480 /* op0 and op1 might still be constant, despite the above
8481 != INTEGER_CST check. Handle it. */
8482 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8483 goto widen_mult_const;
8484 temp = expand_binop (mode, other_optab, op0, op1, target,
8485 unsignedp, OPTAB_LIB_WIDEN);
8486 hipart = gen_highpart (innermode, temp);
8487 htem = expand_mult_highpart_adjust (innermode, hipart,
8488 op0, op1, hipart,
8489 zextend_p);
8490 if (htem != hipart)
8491 emit_move_insn (hipart, htem);
8492 return REDUCE_BIT_FIELD (temp);
8493 }
8494 }
8495 }
8496 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8497 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8498 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8499 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8500
8501 case FMA_EXPR:
8502 {
8503 optab opt = fma_optab;
8504 gimple def0, def2;
8505
8506 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8507 call. */
8508 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8509 {
8510 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8511 tree call_expr;
8512
8513 gcc_assert (fn != NULL_TREE);
8514 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8515 return expand_builtin (call_expr, target, subtarget, mode, false);
8516 }
8517
8518 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8519 /* The multiplication is commutative - look at its 2nd operand
8520 if the first isn't fed by a negate. */
8521 if (!def0)
8522 {
8523 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8524 /* Swap operands if the 2nd operand is fed by a negate. */
8525 if (def0)
8526 std::swap (treeop0, treeop1);
8527 }
8528 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8529
8530 op0 = op2 = NULL;
8531
8532 if (def0 && def2
8533 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8534 {
8535 opt = fnms_optab;
8536 op0 = expand_normal (gimple_assign_rhs1 (def0));
8537 op2 = expand_normal (gimple_assign_rhs1 (def2));
8538 }
8539 else if (def0
8540 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8541 {
8542 opt = fnma_optab;
8543 op0 = expand_normal (gimple_assign_rhs1 (def0));
8544 }
8545 else if (def2
8546 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8547 {
8548 opt = fms_optab;
8549 op2 = expand_normal (gimple_assign_rhs1 (def2));
8550 }
8551
8552 if (op0 == NULL)
8553 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8554 if (op2 == NULL)
8555 op2 = expand_normal (treeop2);
8556 op1 = expand_normal (treeop1);
8557
8558 return expand_ternary_op (TYPE_MODE (type), opt,
8559 op0, op1, op2, target, 0);
8560 }
8561
8562 case MULT_EXPR:
8563 /* If this is a fixed-point operation, then we cannot use the code
8564 below because "expand_mult" doesn't support sat/no-sat fixed-point
8565 multiplications. */
8566 if (ALL_FIXED_POINT_MODE_P (mode))
8567 goto binop;
8568
8569 /* If first operand is constant, swap them.
8570 Thus the following special case checks need only
8571 check the second operand. */
8572 if (TREE_CODE (treeop0) == INTEGER_CST)
8573 std::swap (treeop0, treeop1);
8574
8575 /* Attempt to return something suitable for generating an
8576 indexed address, for machines that support that. */
8577
8578 if (modifier == EXPAND_SUM && mode == ptr_mode
8579 && tree_fits_shwi_p (treeop1))
8580 {
8581 tree exp1 = treeop1;
8582
8583 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8584 EXPAND_SUM);
8585
8586 if (!REG_P (op0))
8587 op0 = force_operand (op0, NULL_RTX);
8588 if (!REG_P (op0))
8589 op0 = copy_to_mode_reg (mode, op0);
8590
8591 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8592 gen_int_mode (tree_to_shwi (exp1),
8593 TYPE_MODE (TREE_TYPE (exp1)))));
8594 }
8595
8596 if (modifier == EXPAND_STACK_PARM)
8597 target = 0;
8598
8599 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8600 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8601
8602 case TRUNC_DIV_EXPR:
8603 case FLOOR_DIV_EXPR:
8604 case CEIL_DIV_EXPR:
8605 case ROUND_DIV_EXPR:
8606 case EXACT_DIV_EXPR:
8607 /* If this is a fixed-point operation, then we cannot use the code
8608 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8609 divisions. */
8610 if (ALL_FIXED_POINT_MODE_P (mode))
8611 goto binop;
8612
8613 if (modifier == EXPAND_STACK_PARM)
8614 target = 0;
8615 /* Possible optimization: compute the dividend with EXPAND_SUM;
8616 then, if the divisor is constant, we can optimize the case
8617 where some terms of the dividend have coefficients divisible by it. */
8618 expand_operands (treeop0, treeop1,
8619 subtarget, &op0, &op1, EXPAND_NORMAL);
8620 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8621
8622 case RDIV_EXPR:
8623 goto binop;
8624
8625 case MULT_HIGHPART_EXPR:
8626 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8627 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8628 gcc_assert (temp);
8629 return temp;
8630
8631 case TRUNC_MOD_EXPR:
8632 case FLOOR_MOD_EXPR:
8633 case CEIL_MOD_EXPR:
8634 case ROUND_MOD_EXPR:
8635 if (modifier == EXPAND_STACK_PARM)
8636 target = 0;
8637 expand_operands (treeop0, treeop1,
8638 subtarget, &op0, &op1, EXPAND_NORMAL);
8639 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8640
8641 case FIXED_CONVERT_EXPR:
8642 op0 = expand_normal (treeop0);
8643 if (target == 0 || modifier == EXPAND_STACK_PARM)
8644 target = gen_reg_rtx (mode);
8645
8646 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8647 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8648 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8649 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8650 else
8651 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8652 return target;
8653
8654 case FIX_TRUNC_EXPR:
8655 op0 = expand_normal (treeop0);
8656 if (target == 0 || modifier == EXPAND_STACK_PARM)
8657 target = gen_reg_rtx (mode);
8658 expand_fix (target, op0, unsignedp);
8659 return target;
8660
8661 case FLOAT_EXPR:
8662 op0 = expand_normal (treeop0);
8663 if (target == 0 || modifier == EXPAND_STACK_PARM)
8664 target = gen_reg_rtx (mode);
8665 /* expand_float can't figure out what to do if FROM has VOIDmode.
8666 So give it the correct mode. With -O, cse will optimize this. */
8667 if (GET_MODE (op0) == VOIDmode)
8668 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8669 op0);
8670 expand_float (target, op0,
8671 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8672 return target;
8673
8674 case NEGATE_EXPR:
8675 op0 = expand_expr (treeop0, subtarget,
8676 VOIDmode, EXPAND_NORMAL);
8677 if (modifier == EXPAND_STACK_PARM)
8678 target = 0;
8679 temp = expand_unop (mode,
8680 optab_for_tree_code (NEGATE_EXPR, type,
8681 optab_default),
8682 op0, target, 0);
8683 gcc_assert (temp);
8684 return REDUCE_BIT_FIELD (temp);
8685
8686 case ABS_EXPR:
8687 op0 = expand_expr (treeop0, subtarget,
8688 VOIDmode, EXPAND_NORMAL);
8689 if (modifier == EXPAND_STACK_PARM)
8690 target = 0;
8691
8692 /* ABS_EXPR is not valid for complex arguments. */
8693 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8694 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8695
8696 /* Unsigned abs is simply the operand. Testing here means we don't
8697 risk generating incorrect code below. */
8698 if (TYPE_UNSIGNED (type))
8699 return op0;
8700
8701 return expand_abs (mode, op0, target, unsignedp,
8702 safe_from_p (target, treeop0, 1));
8703
8704 case MAX_EXPR:
8705 case MIN_EXPR:
8706 target = original_target;
8707 if (target == 0
8708 || modifier == EXPAND_STACK_PARM
8709 || (MEM_P (target) && MEM_VOLATILE_P (target))
8710 || GET_MODE (target) != mode
8711 || (REG_P (target)
8712 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8713 target = gen_reg_rtx (mode);
8714 expand_operands (treeop0, treeop1,
8715 target, &op0, &op1, EXPAND_NORMAL);
8716
8717 /* First try to do it with a special MIN or MAX instruction.
8718 If that does not win, use a conditional jump to select the proper
8719 value. */
8720 this_optab = optab_for_tree_code (code, type, optab_default);
8721 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8722 OPTAB_WIDEN);
8723 if (temp != 0)
8724 return temp;
8725
8726 /* At this point, a MEM target is no longer useful; we will get better
8727 code without it. */
8728
8729 if (! REG_P (target))
8730 target = gen_reg_rtx (mode);
8731
8732 /* If op1 was placed in target, swap op0 and op1. */
8733 if (target != op0 && target == op1)
8734 std::swap (op0, op1);
8735
8736 /* We generate better code and avoid problems with op1 mentioning
8737 target by forcing op1 into a pseudo if it isn't a constant. */
8738 if (! CONSTANT_P (op1))
8739 op1 = force_reg (mode, op1);
8740
8741 {
8742 enum rtx_code comparison_code;
8743 rtx cmpop1 = op1;
8744
8745 if (code == MAX_EXPR)
8746 comparison_code = unsignedp ? GEU : GE;
8747 else
8748 comparison_code = unsignedp ? LEU : LE;
8749
8750 /* Canonicalize to comparisons against 0. */
8751 if (op1 == const1_rtx)
8752 {
8753 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8754 or (a != 0 ? a : 1) for unsigned.
8755 For MIN we are safe converting (a <= 1 ? a : 1)
8756 into (a <= 0 ? a : 1) */
8757 cmpop1 = const0_rtx;
8758 if (code == MAX_EXPR)
8759 comparison_code = unsignedp ? NE : GT;
8760 }
8761 if (op1 == constm1_rtx && !unsignedp)
8762 {
8763 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8764 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8765 cmpop1 = const0_rtx;
8766 if (code == MIN_EXPR)
8767 comparison_code = LT;
8768 }
8769
8770 /* Use a conditional move if possible. */
8771 if (can_conditionally_move_p (mode))
8772 {
8773 rtx insn;
8774
8775 start_sequence ();
8776
8777 /* Try to emit the conditional move. */
8778 insn = emit_conditional_move (target, comparison_code,
8779 op0, cmpop1, mode,
8780 op0, op1, mode,
8781 unsignedp);
8782
8783 /* If we could do the conditional move, emit the sequence,
8784 and return. */
8785 if (insn)
8786 {
8787 rtx_insn *seq = get_insns ();
8788 end_sequence ();
8789 emit_insn (seq);
8790 return target;
8791 }
8792
8793 /* Otherwise discard the sequence and fall back to code with
8794 branches. */
8795 end_sequence ();
8796 }
8797
8798 if (target != op0)
8799 emit_move_insn (target, op0);
8800
8801 lab = gen_label_rtx ();
8802 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8803 unsignedp, mode, NULL_RTX, NULL, lab,
8804 -1);
8805 }
8806 emit_move_insn (target, op1);
8807 emit_label (lab);
8808 return target;
8809
8810 case BIT_NOT_EXPR:
8811 op0 = expand_expr (treeop0, subtarget,
8812 VOIDmode, EXPAND_NORMAL);
8813 if (modifier == EXPAND_STACK_PARM)
8814 target = 0;
8815 /* If we have to reduce the result to bitfield precision
8816 for an unsigned bitfield, expand this as XOR with a proper constant
8817 instead. */
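 /* E.g. for a 3-bit unsigned bitfield held in SImode, ~x is emitted
 as x ^ 0x7, which complements the value and keeps it within the
 field's precision in a single operation. */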
8818 if (reduce_bit_field && TYPE_UNSIGNED (type))
8819 {
8820 wide_int mask = wi::mask (TYPE_PRECISION (type),
8821 false, GET_MODE_PRECISION (mode));
8822
8823 temp = expand_binop (mode, xor_optab, op0,
8824 immed_wide_int_const (mask, mode),
8825 target, 1, OPTAB_LIB_WIDEN);
8826 }
8827 else
8828 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8829 gcc_assert (temp);
8830 return temp;
8831
8832 /* ??? Can optimize bitwise operations with one arg constant.
8833 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8834 and (a bitwise1 b) bitwise2 b (etc)
8835 but that is probably not worth while. */
8836
8837 case BIT_AND_EXPR:
8838 case BIT_IOR_EXPR:
8839 case BIT_XOR_EXPR:
8840 goto binop;
8841
8842 case LROTATE_EXPR:
8843 case RROTATE_EXPR:
8844 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8845 || (GET_MODE_PRECISION (TYPE_MODE (type))
8846 == TYPE_PRECISION (type)));
8847 /* fall through */
8848
8849 case LSHIFT_EXPR:
8850 case RSHIFT_EXPR:
8851 /* If this is a fixed-point operation, then we cannot use the code
8852 below because "expand_shift" doesn't support sat/no-sat fixed-point
8853 shifts. */
8854 if (ALL_FIXED_POINT_MODE_P (mode))
8855 goto binop;
8856
8857 if (! safe_from_p (subtarget, treeop1, 1))
8858 subtarget = 0;
8859 if (modifier == EXPAND_STACK_PARM)
8860 target = 0;
8861 op0 = expand_expr (treeop0, subtarget,
8862 VOIDmode, EXPAND_NORMAL);
8863 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8864 unsignedp);
8865 if (code == LSHIFT_EXPR)
8866 temp = REDUCE_BIT_FIELD (temp);
8867 return temp;
8868
8869 /* Could determine the answer when only additive constants differ. Also,
8870 the addition of one can be handled by changing the condition. */
8871 case LT_EXPR:
8872 case LE_EXPR:
8873 case GT_EXPR:
8874 case GE_EXPR:
8875 case EQ_EXPR:
8876 case NE_EXPR:
8877 case UNORDERED_EXPR:
8878 case ORDERED_EXPR:
8879 case UNLT_EXPR:
8880 case UNLE_EXPR:
8881 case UNGT_EXPR:
8882 case UNGE_EXPR:
8883 case UNEQ_EXPR:
8884 case LTGT_EXPR:
8885 {
8886 temp = do_store_flag (ops,
8887 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8888 tmode != VOIDmode ? tmode : mode);
8889 if (temp)
8890 return temp;
8891
8892 /* Use a compare and a jump for BLKmode comparisons, or for function
8893 type comparisons if have_canonicalize_funcptr_for_compare. */
8894
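 /* When no store-flag sequence applies, the fallback below materializes
 the result with a branch, roughly:

 target = 0;
 if (!(op0 <cond> op1)) goto lab1;
 target = 1;     (or -1 for a signed 1-bit result type)
 lab1:
 */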
8895 if ((target == 0
8896 || modifier == EXPAND_STACK_PARM
8897 || ! safe_from_p (target, treeop0, 1)
8898 || ! safe_from_p (target, treeop1, 1)
8899 /* Make sure we don't have a hard reg (such as function's return
8900 value) live across basic blocks, if not optimizing. */
8901 || (!optimize && REG_P (target)
8902 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8903 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8904
8905 emit_move_insn (target, const0_rtx);
8906
8907 rtx_code_label *lab1 = gen_label_rtx ();
8908 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8909
8910 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8911 emit_move_insn (target, constm1_rtx);
8912 else
8913 emit_move_insn (target, const1_rtx);
8914
8915 emit_label (lab1);
8916 return target;
8917 }
8918 case COMPLEX_EXPR:
8919 /* Get the rtx code of the operands. */
8920 op0 = expand_normal (treeop0);
8921 op1 = expand_normal (treeop1);
8922
8923 if (!target)
8924 target = gen_reg_rtx (TYPE_MODE (type));
8925 else
8926 /* If target overlaps with op1, then either we need to force
8927 op1 into a pseudo (if target also overlaps with op0),
8928 or write the complex parts in reverse order. */
8929 switch (GET_CODE (target))
8930 {
8931 case CONCAT:
8932 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8933 {
8934 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8935 {
8936 complex_expr_force_op1:
8937 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8938 emit_move_insn (temp, op1);
8939 op1 = temp;
8940 break;
8941 }
8942 complex_expr_swap_order:
8943 /* Move the imaginary (op1) and real (op0) parts to their
8944 location. */
8945 write_complex_part (target, op1, true);
8946 write_complex_part (target, op0, false);
8947
8948 return target;
8949 }
8950 break;
8951 case MEM:
8952 temp = adjust_address_nv (target,
8953 GET_MODE_INNER (GET_MODE (target)), 0);
8954 if (reg_overlap_mentioned_p (temp, op1))
8955 {
8956 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8957 temp = adjust_address_nv (target, imode,
8958 GET_MODE_SIZE (imode));
8959 if (reg_overlap_mentioned_p (temp, op0))
8960 goto complex_expr_force_op1;
8961 goto complex_expr_swap_order;
8962 }
8963 break;
8964 default:
8965 if (reg_overlap_mentioned_p (target, op1))
8966 {
8967 if (reg_overlap_mentioned_p (target, op0))
8968 goto complex_expr_force_op1;
8969 goto complex_expr_swap_order;
8970 }
8971 break;
8972 }
8973
8974 /* Move the real (op0) and imaginary (op1) parts to their location. */
8975 write_complex_part (target, op0, false);
8976 write_complex_part (target, op1, true);
8977
8978 return target;
8979
8980 case WIDEN_SUM_EXPR:
8981 {
8982 tree oprnd0 = treeop0;
8983 tree oprnd1 = treeop1;
8984
8985 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8986 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8987 target, unsignedp);
8988 return target;
8989 }
8990
8991 case REDUC_MAX_EXPR:
8992 case REDUC_MIN_EXPR:
8993 case REDUC_PLUS_EXPR:
8994 {
8995 op0 = expand_normal (treeop0);
8996 this_optab = optab_for_tree_code (code, type, optab_default);
8997 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
8998
8999 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9000 {
9001 struct expand_operand ops[2];
9002 enum insn_code icode = optab_handler (this_optab, vec_mode);
9003
9004 create_output_operand (&ops[0], target, mode);
9005 create_input_operand (&ops[1], op0, vec_mode);
9006 if (maybe_expand_insn (icode, 2, ops))
9007 {
9008 target = ops[0].value;
9009 if (GET_MODE (target) != mode)
9010 return gen_lowpart (tmode, target);
9011 return target;
9012 }
9013 }
9014 /* Fall back to optab with vector result, and then extract scalar. */
9015 this_optab = scalar_reduc_to_vector (this_optab, type);
9016 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9017 gcc_assert (temp);
9018 /* The tree code produces a scalar result, but (somewhat by convention)
9019 the optab produces a vector with the result in element 0 if
9020 little-endian, or element N-1 if big-endian. So pull the scalar
9021 result out of that element. */
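 /* E.g. for a V4SImode input this extracts the 32-bit element at bit
 offset 0 on a little-endian target and at bit offset 3 * 32 on a
 big-endian one. */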
9022 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9023 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9024 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9025 target, mode, mode);
9026 gcc_assert (temp);
9027 return temp;
9028 }
9029
9030 case VEC_UNPACK_HI_EXPR:
9031 case VEC_UNPACK_LO_EXPR:
9032 {
9033 op0 = expand_normal (treeop0);
9034 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9035 target, unsignedp);
9036 gcc_assert (temp);
9037 return temp;
9038 }
9039
9040 case VEC_UNPACK_FLOAT_HI_EXPR:
9041 case VEC_UNPACK_FLOAT_LO_EXPR:
9042 {
9043 op0 = expand_normal (treeop0);
9044 /* The signedness is determined from the input operand. */
9045 temp = expand_widen_pattern_expr
9046 (ops, op0, NULL_RTX, NULL_RTX,
9047 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9048
9049 gcc_assert (temp);
9050 return temp;
9051 }
9052
9053 case VEC_WIDEN_MULT_HI_EXPR:
9054 case VEC_WIDEN_MULT_LO_EXPR:
9055 case VEC_WIDEN_MULT_EVEN_EXPR:
9056 case VEC_WIDEN_MULT_ODD_EXPR:
9057 case VEC_WIDEN_LSHIFT_HI_EXPR:
9058 case VEC_WIDEN_LSHIFT_LO_EXPR:
9059 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9060 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9061 target, unsignedp);
9062 gcc_assert (target);
9063 return target;
9064
9065 case VEC_PACK_TRUNC_EXPR:
9066 case VEC_PACK_SAT_EXPR:
9067 case VEC_PACK_FIX_TRUNC_EXPR:
9068 mode = TYPE_MODE (TREE_TYPE (treeop0));
9069 goto binop;
9070
9071 case VEC_PERM_EXPR:
9072 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9073 op2 = expand_normal (treeop2);
9074
9075 /* Careful here: if the target doesn't support integral vector modes,
9076 a constant selection vector could wind up smooshed into a normal
9077 integral constant. */
9078 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9079 {
9080 tree sel_type = TREE_TYPE (treeop2);
9081 machine_mode vmode
9082 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9083 TYPE_VECTOR_SUBPARTS (sel_type));
9084 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9085 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9086 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9087 }
9088 else
9089 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9090
9091 temp = expand_vec_perm (mode, op0, op1, op2, target);
9092 gcc_assert (temp);
9093 return temp;
9094
9095 case DOT_PROD_EXPR:
9096 {
9097 tree oprnd0 = treeop0;
9098 tree oprnd1 = treeop1;
9099 tree oprnd2 = treeop2;
9100 rtx op2;
9101
9102 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9103 op2 = expand_normal (oprnd2);
9104 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9105 target, unsignedp);
9106 return target;
9107 }
9108
9109 case SAD_EXPR:
9110 {
9111 tree oprnd0 = treeop0;
9112 tree oprnd1 = treeop1;
9113 tree oprnd2 = treeop2;
9114 rtx op2;
9115
9116 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9117 op2 = expand_normal (oprnd2);
9118 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9119 target, unsignedp);
9120 return target;
9121 }
9122
9123 case REALIGN_LOAD_EXPR:
9124 {
9125 tree oprnd0 = treeop0;
9126 tree oprnd1 = treeop1;
9127 tree oprnd2 = treeop2;
9128 rtx op2;
9129
9130 this_optab = optab_for_tree_code (code, type, optab_default);
9131 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9132 op2 = expand_normal (oprnd2);
9133 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9134 target, unsignedp);
9135 gcc_assert (temp);
9136 return temp;
9137 }
9138
9139 case COND_EXPR:
9140 {
9141 /* A COND_EXPR with its type being VOID_TYPE represents a
9142 conditional jump and is handled in
9143 expand_gimple_cond_expr. */
9144 gcc_assert (!VOID_TYPE_P (type));
9145
9146 /* Note that COND_EXPRs whose type is a structure or union
9147 are required to be constructed to contain assignments of
9148 a temporary variable, so that we can evaluate them here
9149 for side effect only. If type is void, we must do likewise. */
9150
9151 gcc_assert (!TREE_ADDRESSABLE (type)
9152 && !ignore
9153 && TREE_TYPE (treeop1) != void_type_node
9154 && TREE_TYPE (treeop2) != void_type_node);
9155
9156 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9157 if (temp)
9158 return temp;
9159
9160 /* If we are not to produce a result, we have no target. Otherwise,
9161 if a target was specified use it; it will not be used as an
9162 intermediate target unless it is safe. If no target, use a
9163 temporary. */
9164
9165 if (modifier != EXPAND_STACK_PARM
9166 && original_target
9167 && safe_from_p (original_target, treeop0, 1)
9168 && GET_MODE (original_target) == mode
9169 && !MEM_P (original_target))
9170 temp = original_target;
9171 else
9172 temp = assign_temp (type, 0, 1);
9173
9174 do_pending_stack_adjust ();
9175 NO_DEFER_POP;
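 /* Schematically, the code emitted below is

 if (!treeop0) goto lab0;
 temp = treeop1;
 goto lab1;
 lab0:
 temp = treeop2;
 lab1:
 */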
9176 rtx_code_label *lab0 = gen_label_rtx ();
9177 rtx_code_label *lab1 = gen_label_rtx ();
9178 jumpifnot (treeop0, lab0, -1);
9179 store_expr (treeop1, temp,
9180 modifier == EXPAND_STACK_PARM,
9181 false);
9182
9183 emit_jump_insn (targetm.gen_jump (lab1));
9184 emit_barrier ();
9185 emit_label (lab0);
9186 store_expr (treeop2, temp,
9187 modifier == EXPAND_STACK_PARM,
9188 false);
9189
9190 emit_label (lab1);
9191 OK_DEFER_POP;
9192 return temp;
9193 }
9194
9195 case VEC_COND_EXPR:
9196 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9197 return target;
9198
9199 default:
9200 gcc_unreachable ();
9201 }
9202
9203 /* Here to do an ordinary binary operator. */
9204 binop:
9205 expand_operands (treeop0, treeop1,
9206 subtarget, &op0, &op1, EXPAND_NORMAL);
9207 binop2:
9208 this_optab = optab_for_tree_code (code, type, optab_default);
9209 binop3:
9210 if (modifier == EXPAND_STACK_PARM)
9211 target = 0;
9212 temp = expand_binop (mode, this_optab, op0, op1, target,
9213 unsignedp, OPTAB_LIB_WIDEN);
9214 gcc_assert (temp);
9215 /* Bitwise operations do not need bitfield reduction as we expect their
9216 operands to be properly truncated. */
9217 if (code == BIT_XOR_EXPR
9218 || code == BIT_AND_EXPR
9219 || code == BIT_IOR_EXPR)
9220 return temp;
9221 return REDUCE_BIT_FIELD (temp);
9222 }
9223 #undef REDUCE_BIT_FIELD
9224
9225
9226 /* Return TRUE if expression STMT is suitable for replacement.
9227 Never consider memory loads as replaceable, because those don't ever lead
9228 to constant expressions. */
9229
9230 static bool
9231 stmt_is_replaceable_p (gimple stmt)
9232 {
9233 if (ssa_is_replaceable_p (stmt))
9234 {
9235 /* Don't move around loads. */
9236 if (!gimple_assign_single_p (stmt)
9237 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9238 return true;
9239 }
9240 return false;
9241 }
9242
9243 rtx
9244 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9245 enum expand_modifier modifier, rtx *alt_rtl,
9246 bool inner_reference_p)
9247 {
9248 rtx op0, op1, temp, decl_rtl;
9249 tree type;
9250 int unsignedp;
9251 machine_mode mode;
9252 enum tree_code code = TREE_CODE (exp);
9253 rtx subtarget, original_target;
9254 int ignore;
9255 tree context;
9256 bool reduce_bit_field;
9257 location_t loc = EXPR_LOCATION (exp);
9258 struct separate_ops ops;
9259 tree treeop0, treeop1, treeop2;
9260 tree ssa_name = NULL_TREE;
9261 gimple g;
9262
9263 type = TREE_TYPE (exp);
9264 mode = TYPE_MODE (type);
9265 unsignedp = TYPE_UNSIGNED (type);
9266
9267 treeop0 = treeop1 = treeop2 = NULL_TREE;
9268 if (!VL_EXP_CLASS_P (exp))
9269 switch (TREE_CODE_LENGTH (code))
9270 {
9271 default:
9272 case 3: treeop2 = TREE_OPERAND (exp, 2);
9273 case 2: treeop1 = TREE_OPERAND (exp, 1);
9274 case 1: treeop0 = TREE_OPERAND (exp, 0);
9275 case 0: break;
9276 }
9277 ops.code = code;
9278 ops.type = type;
9279 ops.op0 = treeop0;
9280 ops.op1 = treeop1;
9281 ops.op2 = treeop2;
9282 ops.location = loc;
9283
9284 ignore = (target == const0_rtx
9285 || ((CONVERT_EXPR_CODE_P (code)
9286 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9287 && TREE_CODE (type) == VOID_TYPE));
9288
9289 /* An operation in what may be a bit-field type needs the
9290 result to be reduced to the precision of the bit-field type,
9291 which is narrower than that of the type's mode. */
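 /* For instance, arithmetic on a C bitfield such as
 struct { unsigned int f : 3; } s;
 is typically carried out in SImode, whose precision (32) is wider than
 TYPE_PRECISION (3), so the result must be truncated back to 3 bits. */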
9292 reduce_bit_field = (!ignore
9293 && INTEGRAL_TYPE_P (type)
9294 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9295
9296 /* If we are going to ignore this result, we need only do something
9297 if there is a side-effect somewhere in the expression. If there
9298 is, short-circuit the most common cases here. Note that we must
9299 not call expand_expr with anything but const0_rtx in case this
9300 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9301
9302 if (ignore)
9303 {
9304 if (! TREE_SIDE_EFFECTS (exp))
9305 return const0_rtx;
9306
9307 /* Ensure we reference a volatile object even if value is ignored, but
9308 don't do this if all we are doing is taking its address. */
9309 if (TREE_THIS_VOLATILE (exp)
9310 && TREE_CODE (exp) != FUNCTION_DECL
9311 && mode != VOIDmode && mode != BLKmode
9312 && modifier != EXPAND_CONST_ADDRESS)
9313 {
9314 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9315 if (MEM_P (temp))
9316 copy_to_reg (temp);
9317 return const0_rtx;
9318 }
9319
9320 if (TREE_CODE_CLASS (code) == tcc_unary
9321 || code == BIT_FIELD_REF
9322 || code == COMPONENT_REF
9323 || code == INDIRECT_REF)
9324 return expand_expr (treeop0, const0_rtx, VOIDmode,
9325 modifier);
9326
9327 else if (TREE_CODE_CLASS (code) == tcc_binary
9328 || TREE_CODE_CLASS (code) == tcc_comparison
9329 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9330 {
9331 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9332 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9333 return const0_rtx;
9334 }
9335
9336 target = 0;
9337 }
9338
9339 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9340 target = 0;
9341
9342 /* Use subtarget as the target for operand 0 of a binary operation. */
9343 subtarget = get_subtarget (target);
9344 original_target = target;
9345
9346 switch (code)
9347 {
9348 case LABEL_DECL:
9349 {
9350 tree function = decl_function_context (exp);
9351
9352 temp = label_rtx (exp);
9353 temp = gen_rtx_LABEL_REF (Pmode, temp);
9354
9355 if (function != current_function_decl
9356 && function != 0)
9357 LABEL_REF_NONLOCAL_P (temp) = 1;
9358
9359 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9360 return temp;
9361 }
9362
9363 case SSA_NAME:
9364 /* ??? ivopts calls expander, without any preparation from
9365 out-of-ssa. So fake instructions as if this were an access to the
9366 base variable. This unnecessarily allocates a pseudo; see how we can
9367 reuse it if partition base vars have it set already. */
9368 if (!currently_expanding_to_rtl)
9369 {
9370 tree var = SSA_NAME_VAR (exp);
9371 if (var && DECL_RTL_SET_P (var))
9372 return DECL_RTL (var);
9373 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9374 LAST_VIRTUAL_REGISTER + 1);
9375 }
9376
9377 g = get_gimple_for_ssa_name (exp);
9378 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9379 if (g == NULL
9380 && modifier == EXPAND_INITIALIZER
9381 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9382 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9383 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9384 g = SSA_NAME_DEF_STMT (exp);
9385 if (g)
9386 {
9387 rtx r;
9388 ops.code = gimple_assign_rhs_code (g);
9389 switch (get_gimple_rhs_class (ops.code))
9390 {
9391 case GIMPLE_TERNARY_RHS:
9392 ops.op2 = gimple_assign_rhs3 (g);
9393 /* Fallthru */
9394 case GIMPLE_BINARY_RHS:
9395 ops.op1 = gimple_assign_rhs2 (g);
9396
9397 /* Try to expand conditional compare. */
9398 if (targetm.gen_ccmp_first)
9399 {
9400 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9401 r = expand_ccmp_expr (g);
9402 if (r)
9403 break;
9404 }
9405 /* Fallthru */
9406 case GIMPLE_UNARY_RHS:
9407 ops.op0 = gimple_assign_rhs1 (g);
9408 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9409 ops.location = gimple_location (g);
9410 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9411 break;
9412 case GIMPLE_SINGLE_RHS:
9413 {
9414 location_t saved_loc = curr_insn_location ();
9415 set_curr_insn_location (gimple_location (g));
9416 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9417 tmode, modifier, NULL, inner_reference_p);
9418 set_curr_insn_location (saved_loc);
9419 break;
9420 }
9421 default:
9422 gcc_unreachable ();
9423 }
9424 if (REG_P (r) && !REG_EXPR (r))
9425 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9426 return r;
9427 }
9428
9429 ssa_name = exp;
9430 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9431 exp = SSA_NAME_VAR (ssa_name);
9432 goto expand_decl_rtl;
9433
9434 case PARM_DECL:
9435 case VAR_DECL:
9436 /* If a static var's type was incomplete when the decl was written,
9437 but the type is complete now, lay out the decl now. */
9438 if (DECL_SIZE (exp) == 0
9439 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9440 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9441 layout_decl (exp, 0);
9442
9443 /* ... fall through ... */
9444
9445 case FUNCTION_DECL:
9446 case RESULT_DECL:
9447 decl_rtl = DECL_RTL (exp);
9448 expand_decl_rtl:
9449 gcc_assert (decl_rtl);
9450 decl_rtl = copy_rtx (decl_rtl);
9451 /* Record writes to register variables. */
9452 if (modifier == EXPAND_WRITE
9453 && REG_P (decl_rtl)
9454 && HARD_REGISTER_P (decl_rtl))
9455 add_to_hard_reg_set (&crtl->asm_clobbers,
9456 GET_MODE (decl_rtl), REGNO (decl_rtl));
9457
9458 /* Ensure the variable is marked as used even if it doesn't go through
9459 a parser. If it hasn't been used yet, write out an external
9460 definition. */
9461 TREE_USED (exp) = 1;
9462
9463 /* Show we haven't gotten RTL for this yet. */
9464 temp = 0;
9465
9466 /* Variables inherited from containing functions should have
9467 been lowered by this point. */
9468 context = decl_function_context (exp);
9469 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9470 || context == current_function_decl
9471 || TREE_STATIC (exp)
9472 || DECL_EXTERNAL (exp)
9473 /* ??? C++ creates functions that are not TREE_STATIC. */
9474 || TREE_CODE (exp) == FUNCTION_DECL);
9475
9476 /* This is the case of an array whose size is to be determined
9477 from its initializer, while the initializer is still being parsed.
9478 ??? We aren't parsing while expanding anymore. */
9479
9480 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9481 temp = validize_mem (decl_rtl);
9482
9483 /* If DECL_RTL is memory, we are in the normal case and the
9484 address is not valid, get the address into a register. */
9485
9486 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9487 {
9488 if (alt_rtl)
9489 *alt_rtl = decl_rtl;
9490 decl_rtl = use_anchored_address (decl_rtl);
9491 if (modifier != EXPAND_CONST_ADDRESS
9492 && modifier != EXPAND_SUM
9493 && !memory_address_addr_space_p (DECL_MODE (exp),
9494 XEXP (decl_rtl, 0),
9495 MEM_ADDR_SPACE (decl_rtl)))
9496 temp = replace_equiv_address (decl_rtl,
9497 copy_rtx (XEXP (decl_rtl, 0)));
9498 }
9499
9500 /* If we got something, return it. But first, set the alignment
9501 if the address is a register. */
9502 if (temp != 0)
9503 {
9504 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9505 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9506
9507 return temp;
9508 }
9509
9510 /* If the mode of DECL_RTL does not match that of the decl,
9511 there are two cases: we are dealing with a BLKmode value
9512 that is returned in a register, or we are dealing with
9513 a promoted value. In the latter case, return a SUBREG
9514 of the wanted mode, but mark it so that we know that it
9515 was already extended. */
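 /* E.g. on a target that promotes narrow arguments, a 'short' parameter
 may live in an SImode register while DECL_MODE is HImode; in that case
 we return (subreg:HI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P set so
 later code knows the value is already extended. */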
9516 if (REG_P (decl_rtl)
9517 && DECL_MODE (exp) != BLKmode
9518 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9519 {
9520 machine_mode pmode;
9521
9522 /* Get the signedness to be used for this variable. Ensure we get
9523 the same mode we got when the variable was declared. */
9524 if (code == SSA_NAME
9525 && (g = SSA_NAME_DEF_STMT (ssa_name))
9526 && gimple_code (g) == GIMPLE_CALL
9527 && !gimple_call_internal_p (g))
9528 pmode = promote_function_mode (type, mode, &unsignedp,
9529 gimple_call_fntype (g),
9530 2);
9531 else
9532 pmode = promote_decl_mode (exp, &unsignedp);
9533 gcc_assert (GET_MODE (decl_rtl) == pmode);
9534
9535 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9536 SUBREG_PROMOTED_VAR_P (temp) = 1;
9537 SUBREG_PROMOTED_SET (temp, unsignedp);
9538 return temp;
9539 }
9540
9541 return decl_rtl;
9542
9543 case INTEGER_CST:
9544 /* Given that TYPE_PRECISION (type) is not always equal to
9545 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9546 the former to the latter according to the signedness of the
9547 type. */
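 /* For example, a constant of a 24-bit bitfield type whose mode is
 SImode is zero- or sign-extended from 24 to 32 bits, according to
 TYPE_SIGN, before the constant rtx is built. */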
9548 temp = immed_wide_int_const (wide_int::from
9549 (exp,
9550 GET_MODE_PRECISION (TYPE_MODE (type)),
9551 TYPE_SIGN (type)),
9552 TYPE_MODE (type));
9553 return temp;
9554
9555 case VECTOR_CST:
9556 {
9557 tree tmp = NULL_TREE;
9558 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9559 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9560 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9561 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9562 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9563 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9564 return const_vector_from_tree (exp);
9565 if (GET_MODE_CLASS (mode) == MODE_INT)
9566 {
9567 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9568 if (type_for_mode)
9569 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9570 }
9571 if (!tmp)
9572 {
9573 vec<constructor_elt, va_gc> *v;
9574 unsigned i;
9575 vec_alloc (v, VECTOR_CST_NELTS (exp));
9576 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9577 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9578 tmp = build_constructor (type, v);
9579 }
9580 return expand_expr (tmp, ignore ? const0_rtx : target,
9581 tmode, modifier);
9582 }
9583
9584 case CONST_DECL:
9585 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9586
9587 case REAL_CST:
9588 /* If optimized, generate immediate CONST_DOUBLE
9589 which will be turned into memory by reload if necessary.
9590
9591 We used to force a register so that loop.c could see it. But
9592 this does not allow gen_* patterns to perform optimizations with
9593 the constants. It also produces two insns in cases like "x = 1.0;".
9594 On most machines, floating-point constants are not permitted in
9595 many insns, so we'd end up copying it to a register in any case.
9596
9597 Now, we do the copying in expand_binop, if appropriate. */
9598 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9599 TYPE_MODE (TREE_TYPE (exp)));
9600
9601 case FIXED_CST:
9602 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9603 TYPE_MODE (TREE_TYPE (exp)));
9604
9605 case COMPLEX_CST:
9606 /* Handle evaluating a complex constant in a CONCAT target. */
9607 if (original_target && GET_CODE (original_target) == CONCAT)
9608 {
9609 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9610 rtx rtarg, itarg;
9611
9612 rtarg = XEXP (original_target, 0);
9613 itarg = XEXP (original_target, 1);
9614
9615 /* Move the real and imaginary parts separately. */
9616 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9617 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9618
9619 if (op0 != rtarg)
9620 emit_move_insn (rtarg, op0);
9621 if (op1 != itarg)
9622 emit_move_insn (itarg, op1);
9623
9624 return original_target;
9625 }
9626
9627 /* ... fall through ... */
9628
9629 case STRING_CST:
9630 temp = expand_expr_constant (exp, 1, modifier);
9631
9632 /* temp contains a constant address.
9633 On RISC machines where a constant address isn't valid,
9634 make some insns to get that address into a register. */
9635 if (modifier != EXPAND_CONST_ADDRESS
9636 && modifier != EXPAND_INITIALIZER
9637 && modifier != EXPAND_SUM
9638 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9639 MEM_ADDR_SPACE (temp)))
9640 return replace_equiv_address (temp,
9641 copy_rtx (XEXP (temp, 0)));
9642 return temp;
9643
9644 case SAVE_EXPR:
9645 {
9646 tree val = treeop0;
9647 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9648 inner_reference_p);
9649
9650 if (!SAVE_EXPR_RESOLVED_P (exp))
9651 {
9652 /* We can indeed still hit this case, typically via builtin
9653 expanders calling save_expr immediately before expanding
9654 something. Assume this means that we only have to deal
9655 with non-BLKmode values. */
9656 gcc_assert (GET_MODE (ret) != BLKmode);
9657
9658 val = build_decl (curr_insn_location (),
9659 VAR_DECL, NULL, TREE_TYPE (exp));
9660 DECL_ARTIFICIAL (val) = 1;
9661 DECL_IGNORED_P (val) = 1;
9662 treeop0 = val;
9663 TREE_OPERAND (exp, 0) = treeop0;
9664 SAVE_EXPR_RESOLVED_P (exp) = 1;
9665
9666 if (!CONSTANT_P (ret))
9667 ret = copy_to_reg (ret);
9668 SET_DECL_RTL (val, ret);
9669 }
9670
9671 return ret;
9672 }
9673
9674
9675 case CONSTRUCTOR:
9676 /* If we don't need the result, just ensure we evaluate any
9677 subexpressions. */
9678 if (ignore)
9679 {
9680 unsigned HOST_WIDE_INT idx;
9681 tree value;
9682
9683 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9684 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9685
9686 return const0_rtx;
9687 }
9688
9689 return expand_constructor (exp, target, modifier, false);
9690
9691 case TARGET_MEM_REF:
9692 {
9693 addr_space_t as
9694 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9695 enum insn_code icode;
9696 unsigned int align;
9697
9698 op0 = addr_for_mem_ref (exp, as, true);
9699 op0 = memory_address_addr_space (mode, op0, as);
9700 temp = gen_rtx_MEM (mode, op0);
9701 set_mem_attributes (temp, exp, 0);
9702 set_mem_addr_space (temp, as);
9703 align = get_object_alignment (exp);
9704 if (modifier != EXPAND_WRITE
9705 && modifier != EXPAND_MEMORY
9706 && mode != BLKmode
9707 && align < GET_MODE_ALIGNMENT (mode)
9708 /* If the target does not have special handling for unaligned
9709 loads of this mode, then it can use regular moves for them. */
9710 && ((icode = optab_handler (movmisalign_optab, mode))
9711 != CODE_FOR_nothing))
9712 {
9713 struct expand_operand ops[2];
9714
9715 /* We've already validated the memory, and we're creating a
9716 new pseudo destination. The predicates really can't fail,
9717 nor can the generator. */
9718 create_output_operand (&ops[0], NULL_RTX, mode);
9719 create_fixed_operand (&ops[1], temp);
9720 expand_insn (icode, 2, ops);
9721 temp = ops[0].value;
9722 }
9723 return temp;
9724 }
9725
9726 case MEM_REF:
9727 {
9728 addr_space_t as
9729 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9730 machine_mode address_mode;
9731 tree base = TREE_OPERAND (exp, 0);
9732 gimple def_stmt;
9733 enum insn_code icode;
9734 unsigned align;
9735 /* Handle expansion of non-aliased memory with non-BLKmode. That
9736 might end up in a register. */
9737 if (mem_ref_refers_to_non_mem_p (exp))
9738 {
9739 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9740 base = TREE_OPERAND (base, 0);
9741 if (offset == 0
9742 && tree_fits_uhwi_p (TYPE_SIZE (type))
9743 && (GET_MODE_BITSIZE (DECL_MODE (base))
9744 == tree_to_uhwi (TYPE_SIZE (type))))
9745 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9746 target, tmode, modifier);
9747 if (TYPE_MODE (type) == BLKmode)
9748 {
9749 temp = assign_stack_temp (DECL_MODE (base),
9750 GET_MODE_SIZE (DECL_MODE (base)));
9751 store_expr (base, temp, 0, false);
9752 temp = adjust_address (temp, BLKmode, offset);
9753 set_mem_size (temp, int_size_in_bytes (type));
9754 return temp;
9755 }
9756 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9757 bitsize_int (offset * BITS_PER_UNIT));
9758 return expand_expr (exp, target, tmode, modifier);
9759 }
9760 address_mode = targetm.addr_space.address_mode (as);
9761 base = TREE_OPERAND (exp, 0);
9762 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9763 {
9764 tree mask = gimple_assign_rhs2 (def_stmt);
9765 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9766 gimple_assign_rhs1 (def_stmt), mask);
9767 TREE_OPERAND (exp, 0) = base;
9768 }
9769 align = get_object_alignment (exp);
9770 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9771 op0 = memory_address_addr_space (mode, op0, as);
9772 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9773 {
9774 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9775 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9776 op0 = memory_address_addr_space (mode, op0, as);
9777 }
9778 temp = gen_rtx_MEM (mode, op0);
9779 set_mem_attributes (temp, exp, 0);
9780 set_mem_addr_space (temp, as);
9781 if (TREE_THIS_VOLATILE (exp))
9782 MEM_VOLATILE_P (temp) = 1;
9783 if (modifier != EXPAND_WRITE
9784 && modifier != EXPAND_MEMORY
9785 && !inner_reference_p
9786 && mode != BLKmode
9787 && align < GET_MODE_ALIGNMENT (mode))
9788 {
9789 if ((icode = optab_handler (movmisalign_optab, mode))
9790 != CODE_FOR_nothing)
9791 {
9792 struct expand_operand ops[2];
9793
9794 /* We've already validated the memory, and we're creating a
9795 new pseudo destination. The predicates really can't fail,
9796 nor can the generator. */
9797 create_output_operand (&ops[0], NULL_RTX, mode);
9798 create_fixed_operand (&ops[1], temp);
9799 expand_insn (icode, 2, ops);
9800 temp = ops[0].value;
9801 }
9802 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9803 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9804 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9805 (modifier == EXPAND_STACK_PARM
9806 ? NULL_RTX : target),
9807 mode, mode);
9808 }
9809 return temp;
9810 }
9811
9812 case ARRAY_REF:
9813
9814 {
9815 tree array = treeop0;
9816 tree index = treeop1;
9817 tree init;
9818
9819 /* Fold an expression like: "foo"[2].
9820 This is not done in fold so it won't happen inside &.
9821 Don't fold if this is for wide characters since it's too
9822 difficult to do correctly and this is a very rare case. */
9823
9824 if (modifier != EXPAND_CONST_ADDRESS
9825 && modifier != EXPAND_INITIALIZER
9826 && modifier != EXPAND_MEMORY)
9827 {
9828 tree t = fold_read_from_constant_string (exp);
9829
9830 if (t)
9831 return expand_expr (t, target, tmode, modifier);
9832 }
9833
9834 /* If this is a constant index into a constant array,
9835 just get the value from the array. Handle both the cases when
9836 we have an explicit constructor and when our operand is a variable
9837 that was declared const. */
9838
9839 if (modifier != EXPAND_CONST_ADDRESS
9840 && modifier != EXPAND_INITIALIZER
9841 && modifier != EXPAND_MEMORY
9842 && TREE_CODE (array) == CONSTRUCTOR
9843 && ! TREE_SIDE_EFFECTS (array)
9844 && TREE_CODE (index) == INTEGER_CST)
9845 {
9846 unsigned HOST_WIDE_INT ix;
9847 tree field, value;
9848
9849 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9850 field, value)
9851 if (tree_int_cst_equal (field, index))
9852 {
9853 if (!TREE_SIDE_EFFECTS (value))
9854 return expand_expr (fold (value), target, tmode, modifier);
9855 break;
9856 }
9857 }
9858
9859 else if (optimize >= 1
9860 && modifier != EXPAND_CONST_ADDRESS
9861 && modifier != EXPAND_INITIALIZER
9862 && modifier != EXPAND_MEMORY
9863 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9864 && TREE_CODE (index) == INTEGER_CST
9865 && (TREE_CODE (array) == VAR_DECL
9866 || TREE_CODE (array) == CONST_DECL)
9867 && (init = ctor_for_folding (array)) != error_mark_node)
9868 {
9869 if (init == NULL_TREE)
9870 {
9871 tree value = build_zero_cst (type);
9872 if (TREE_CODE (value) == CONSTRUCTOR)
9873 {
9874 /* If VALUE is a CONSTRUCTOR, this optimization is only
9875 useful if this doesn't store the CONSTRUCTOR into
9876 memory. If it does, it is more efficient to just
9877 load the data from the array directly. */
9878 rtx ret = expand_constructor (value, target,
9879 modifier, true);
9880 if (ret == NULL_RTX)
9881 value = NULL_TREE;
9882 }
9883
9884 if (value)
9885 return expand_expr (value, target, tmode, modifier);
9886 }
9887 else if (TREE_CODE (init) == CONSTRUCTOR)
9888 {
9889 unsigned HOST_WIDE_INT ix;
9890 tree field, value;
9891
9892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9893 field, value)
9894 if (tree_int_cst_equal (field, index))
9895 {
9896 if (TREE_SIDE_EFFECTS (value))
9897 break;
9898
9899 if (TREE_CODE (value) == CONSTRUCTOR)
9900 {
9901 /* If VALUE is a CONSTRUCTOR, this
9902 optimization is only useful if
9903 this doesn't store the CONSTRUCTOR
9904 into memory. If it does, it is more
9905 efficient to just load the data from
9906 the array directly. */
9907 rtx ret = expand_constructor (value, target,
9908 modifier, true);
9909 if (ret == NULL_RTX)
9910 break;
9911 }
9912
9913 return
9914 expand_expr (fold (value), target, tmode, modifier);
9915 }
9916 }
9917 else if (TREE_CODE (init) == STRING_CST)
9918 {
9919 tree low_bound = array_ref_low_bound (exp);
9920 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9921
9922 /* Optimize the special case of a zero lower bound.
9923
9924 We convert the lower bound to sizetype to avoid problems
9925 with constant folding. E.g. suppose the lower bound is
9926 1 and its mode is QI. Without the conversion
9927 (ARRAY + (INDEX - (unsigned char)1))
9928 becomes
9929 (ARRAY + (-(unsigned char)1) + INDEX)
9930 which becomes
9931 (ARRAY + 255 + INDEX). Oops! */
9932 if (!integer_zerop (low_bound))
9933 index1 = size_diffop_loc (loc, index1,
9934 fold_convert_loc (loc, sizetype,
9935 low_bound));
9936
9937 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9938 {
9939 tree type = TREE_TYPE (TREE_TYPE (init));
9940 machine_mode mode = TYPE_MODE (type);
9941
9942 if (GET_MODE_CLASS (mode) == MODE_INT
9943 && GET_MODE_SIZE (mode) == 1)
9944 return gen_int_mode (TREE_STRING_POINTER (init)
9945 [TREE_INT_CST_LOW (index1)],
9946 mode);
9947 }
9948 }
9949 }
9950 }
9951 goto normal_inner_ref;
9952
9953 case COMPONENT_REF:
9954 /* If the operand is a CONSTRUCTOR, we can just extract the
9955 appropriate field if it is present. */
9956 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9957 {
9958 unsigned HOST_WIDE_INT idx;
9959 tree field, value;
9960
9961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9962 idx, field, value)
9963 if (field == treeop1
9964 /* We can normally use the value of the field in the
9965 CONSTRUCTOR. However, if this is a bitfield in
9966 an integral mode that we can fit in a HOST_WIDE_INT,
9967 we must mask only the number of bits in the bitfield,
9968 since this is done implicitly by the constructor. If
9969 the bitfield does not meet either of those conditions,
9970 we can't do this optimization. */
9971 && (! DECL_BIT_FIELD (field)
9972 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9973 && (GET_MODE_PRECISION (DECL_MODE (field))
9974 <= HOST_BITS_PER_WIDE_INT))))
9975 {
9976 if (DECL_BIT_FIELD (field)
9977 && modifier == EXPAND_STACK_PARM)
9978 target = 0;
9979 op0 = expand_expr (value, target, tmode, modifier);
9980 if (DECL_BIT_FIELD (field))
9981 {
9982 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9983 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9984
9985 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9986 {
9987 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9988 imode);
9989 op0 = expand_and (imode, op0, op1, target);
9990 }
9991 else
9992 {
9993 int count = GET_MODE_PRECISION (imode) - bitsize;
9994
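 /* Sign-extend the signed bitfield value by shifting it to
 the top of IMODE and arithmetically shifting it back down;
 e.g. a 3-bit field in SImode is shifted left and then
 right by 29. */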
9995 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9996 target, 0);
9997 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9998 target, 0);
9999 }
10000 }
10001
10002 return op0;
10003 }
10004 }
10005 goto normal_inner_ref;
10006
10007 case BIT_FIELD_REF:
10008 case ARRAY_RANGE_REF:
10009 normal_inner_ref:
10010 {
10011 machine_mode mode1, mode2;
10012 HOST_WIDE_INT bitsize, bitpos;
10013 tree offset;
10014 int volatilep = 0, must_force_mem;
10015 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10016 &mode1, &unsignedp, &volatilep, true);
10017 rtx orig_op0, memloc;
10018 bool clear_mem_expr = false;
10019
10020 /* If we got back the original object, something is wrong. Perhaps
10021 we are evaluating an expression too early. In any event, don't
10022 infinitely recurse. */
10023 gcc_assert (tem != exp);
10024
10025 /* If TEM's type is a union of variable size, pass TARGET to the inner
10026 computation, since it will need a temporary and TARGET is known
10027 to have to do. This occurs in unchecked conversion in Ada. */
10028 orig_op0 = op0
10029 = expand_expr_real (tem,
10030 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10031 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10032 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10033 != INTEGER_CST)
10034 && modifier != EXPAND_STACK_PARM
10035 ? target : NULL_RTX),
10036 VOIDmode,
10037 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10038 NULL, true);
10039
10040 /* If the field has a mode, we want to access it in the
10041 field's mode, not the computed mode.
10042 If a MEM has VOIDmode (external with incomplete type),
10043 use BLKmode for it instead. */
10044 if (MEM_P (op0))
10045 {
10046 if (mode1 != VOIDmode)
10047 op0 = adjust_address (op0, mode1, 0);
10048 else if (GET_MODE (op0) == VOIDmode)
10049 op0 = adjust_address (op0, BLKmode, 0);
10050 }
10051
10052 mode2
10053 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10054
10055 /* If we have either an offset, a BLKmode result, or a reference
10056 outside the underlying object, we must force it to memory.
10057 Such a case can occur in Ada if we have unchecked conversion
10058 of an expression from a scalar type to an aggregate type or
10059 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10060 passed a partially uninitialized object or a view-conversion
10061 to a larger size. */
10062 must_force_mem = (offset
10063 || mode1 == BLKmode
10064 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10065
10066 /* Handle CONCAT first. */
10067 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10068 {
10069 if (bitpos == 0
10070 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10071 return op0;
10072 if (bitpos == 0
10073 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10074 && bitsize)
10075 {
10076 op0 = XEXP (op0, 0);
10077 mode2 = GET_MODE (op0);
10078 }
10079 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10080 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10081 && bitpos
10082 && bitsize)
10083 {
10084 op0 = XEXP (op0, 1);
10085 bitpos = 0;
10086 mode2 = GET_MODE (op0);
10087 }
10088 else
10089 /* Otherwise force into memory. */
10090 must_force_mem = 1;
10091 }
10092
10093 /* If this is a constant, put it in a register if it is a legitimate
10094 constant and we don't need a memory reference. */
10095 if (CONSTANT_P (op0)
10096 && mode2 != BLKmode
10097 && targetm.legitimate_constant_p (mode2, op0)
10098 && !must_force_mem)
10099 op0 = force_reg (mode2, op0);
10100
10101 /* Otherwise, if this is a constant, try to force it to the constant
10102 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10103 is a legitimate constant. */
10104 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10105 op0 = validize_mem (memloc);
10106
10107 /* Otherwise, if this is a constant or the object is not in memory
10108 and need be, put it there. */
10109 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10110 {
10111 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10112 emit_move_insn (memloc, op0);
10113 op0 = memloc;
10114 clear_mem_expr = true;
10115 }
10116
10117 if (offset)
10118 {
10119 machine_mode address_mode;
10120 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10121 EXPAND_SUM);
10122
10123 gcc_assert (MEM_P (op0));
10124
10125 address_mode = get_address_mode (op0);
10126 if (GET_MODE (offset_rtx) != address_mode)
10127 {
10128 /* We cannot be sure that the RTL in offset_rtx is valid outside
10129 of a memory address context, so force it into a register
10130 before attempting to convert it to the desired mode. */
10131 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10132 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10133 }
10134
10135 /* See the comment in expand_assignment for the rationale. */
10136 if (mode1 != VOIDmode
10137 && bitpos != 0
10138 && bitsize > 0
10139 && (bitpos % bitsize) == 0
10140 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10141 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10142 {
10143 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10144 bitpos = 0;
10145 }
10146
10147 op0 = offset_address (op0, offset_rtx,
10148 highest_pow2_factor (offset));
10149 }
10150
10151 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10152 record its alignment as BIGGEST_ALIGNMENT. */
10153 if (MEM_P (op0) && bitpos == 0 && offset != 0
10154 && is_aligning_offset (offset, tem))
10155 set_mem_align (op0, BIGGEST_ALIGNMENT);
10156
10157 /* Don't forget about volatility even if this is a bitfield. */
10158 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10159 {
10160 if (op0 == orig_op0)
10161 op0 = copy_rtx (op0);
10162
10163 MEM_VOLATILE_P (op0) = 1;
10164 }
10165
10166 /* In cases where an aligned union has an unaligned object
10167 as a field, we might be extracting a BLKmode value from
10168 an integer-mode (e.g., SImode) object. Handle this case
10169 by doing the extract into an object as wide as the field
10170 (which we know to be the width of a basic mode), then
10171 storing into memory, and changing the mode to BLKmode. */
10172 if (mode1 == VOIDmode
10173 || REG_P (op0) || GET_CODE (op0) == SUBREG
10174 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10175 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10176 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10177 && modifier != EXPAND_CONST_ADDRESS
10178 && modifier != EXPAND_INITIALIZER
10179 && modifier != EXPAND_MEMORY)
10180 /* If the bitfield is volatile and the bitsize
10181 is narrower than the access size of the bitfield,
10182 we need to extract bitfields from the access. */
10183 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10184 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10185 && mode1 != BLKmode
10186 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10187 /* If the field isn't aligned enough to fetch as a memref,
10188 fetch it as a bit field. */
10189 || (mode1 != BLKmode
10190 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10191 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10192 || (MEM_P (op0)
10193 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10194 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10195 && modifier != EXPAND_MEMORY
10196 && ((modifier == EXPAND_CONST_ADDRESS
10197 || modifier == EXPAND_INITIALIZER)
10198 ? STRICT_ALIGNMENT
10199 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10200 || (bitpos % BITS_PER_UNIT != 0)))
10201 /* If the type and the field are a constant size and the
10202 size of the type isn't the same size as the bitfield,
10203 we must use bitfield operations. */
10204 || (bitsize >= 0
10205 && TYPE_SIZE (TREE_TYPE (exp))
10206 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10208 bitsize)))
10209 {
10210 machine_mode ext_mode = mode;
10211
10212 if (ext_mode == BLKmode
10213 && ! (target != 0 && MEM_P (op0)
10214 && MEM_P (target)
10215 && bitpos % BITS_PER_UNIT == 0))
10216 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10217
10218 if (ext_mode == BLKmode)
10219 {
10220 if (target == 0)
10221 target = assign_temp (type, 1, 1);
10222
10223 /* ??? Unlike the similar test a few lines below, this one is
10224 very likely obsolete. */
10225 if (bitsize == 0)
10226 return target;
10227
10228 /* In this case, BITPOS must start at a byte boundary and
10229 TARGET, if specified, must be a MEM. */
10230 gcc_assert (MEM_P (op0)
10231 && (!target || MEM_P (target))
10232 && !(bitpos % BITS_PER_UNIT));
10233
10234 emit_block_move (target,
10235 adjust_address (op0, VOIDmode,
10236 bitpos / BITS_PER_UNIT),
10237 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10238 / BITS_PER_UNIT),
10239 (modifier == EXPAND_STACK_PARM
10240 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10241
10242 return target;
10243 }
10244
10245 /* If we have nothing to extract, the result will be 0 for targets
10246 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10247 return 0 for the sake of consistency, as reading a zero-sized
10248 bitfield is valid in Ada and the value is fully specified. */
10249 if (bitsize == 0)
10250 return const0_rtx;
10251
10252 op0 = validize_mem (op0);
10253
10254 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10255 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10256
10257 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10258 (modifier == EXPAND_STACK_PARM
10259 ? NULL_RTX : target),
10260 ext_mode, ext_mode);
10261
10262 /* If the result is a record type and BITSIZE is narrower than
10263 the mode of OP0, an integral mode, and this is a big endian
10264 machine, we must put the field into the high-order bits. */
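 /* E.g. a 16-bit field extracted into an SImode register on a
 big-endian machine is shifted left by 32 - 16 = 16 so that it
 occupies the high-order half of the register. */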
10265 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10266 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10267 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10268 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10269 GET_MODE_BITSIZE (GET_MODE (op0))
10270 - bitsize, op0, 1);
10271
10272 /* If the result type is BLKmode, store the data into a temporary
10273 of the appropriate type, but with the mode corresponding to the
10274 mode for the data we have (op0's mode). */
10275 if (mode == BLKmode)
10276 {
10277 rtx new_rtx
10278 = assign_stack_temp_for_type (ext_mode,
10279 GET_MODE_BITSIZE (ext_mode),
10280 type);
10281 emit_move_insn (new_rtx, op0);
10282 op0 = copy_rtx (new_rtx);
10283 PUT_MODE (op0, BLKmode);
10284 }
10285
10286 return op0;
10287 }
10288
10289 /* If the result is BLKmode, use that to access the object
10290 now as well. */
10291 if (mode == BLKmode)
10292 mode1 = BLKmode;
10293
10294 /* Get a reference to just this component. */
10295 if (modifier == EXPAND_CONST_ADDRESS
10296 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10297 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10298 else
10299 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10300
10301 if (op0 == orig_op0)
10302 op0 = copy_rtx (op0);
10303
10304 set_mem_attributes (op0, exp, 0);
10305
10306 if (REG_P (XEXP (op0, 0)))
10307 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10308
10309 /* If op0 is a temporary because the original expression was forced
10310 to memory, clear MEM_EXPR so that the original expression cannot
10311 be marked as addressable through MEM_EXPR of the temporary. */
10312 if (clear_mem_expr)
10313 set_mem_expr (op0, NULL_TREE);
10314
10315 MEM_VOLATILE_P (op0) |= volatilep;
10316 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10317 || modifier == EXPAND_CONST_ADDRESS
10318 || modifier == EXPAND_INITIALIZER)
10319 return op0;
10320
10321 if (target == 0)
10322 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10323
10324 convert_move (target, op0, unsignedp);
10325 return target;
10326 }
10327
10328 case OBJ_TYPE_REF:
10329 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10330
10331 case CALL_EXPR:
10332 /* All valid uses of __builtin_va_arg_pack () are removed during
10333 inlining. */
10334 if (CALL_EXPR_VA_ARG_PACK (exp))
10335 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10336 {
10337 tree fndecl = get_callee_fndecl (exp), attr;
10338
10339 if (fndecl
10340 && (attr = lookup_attribute ("error",
10341 DECL_ATTRIBUTES (fndecl))) != NULL)
10342 error ("%Kcall to %qs declared with attribute error: %s",
10343 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10344 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10345 if (fndecl
10346 && (attr = lookup_attribute ("warning",
10347 DECL_ATTRIBUTES (fndecl))) != NULL)
10348 warning_at (tree_nonartificial_location (exp),
10349 0, "%Kcall to %qs declared with attribute warning: %s",
10350 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10351 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10352
10353 /* Check for a built-in function. */
10354 if (fndecl && DECL_BUILT_IN (fndecl))
10355 {
10356 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10357 if (CALL_WITH_BOUNDS_P (exp))
10358 return expand_builtin_with_bounds (exp, target, subtarget,
10359 tmode, ignore);
10360 else
10361 return expand_builtin (exp, target, subtarget, tmode, ignore);
10362 }
10363 }
10364 return expand_call (exp, target, ignore);
10365
10366 case VIEW_CONVERT_EXPR:
10367 op0 = NULL_RTX;
10368
10369 /* If we are converting to BLKmode, try to avoid an intermediate
10370 temporary by fetching an inner memory reference. */
10371 if (mode == BLKmode
10372 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10373 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10374 && handled_component_p (treeop0))
10375 {
10376 machine_mode mode1;
10377 HOST_WIDE_INT bitsize, bitpos;
10378 tree offset;
10379 int unsignedp;
10380 int volatilep = 0;
10381 tree tem
10382 = get_inner_reference (treeop0, &bitsize, &bitpos,
10383 &offset, &mode1, &unsignedp, &volatilep,
10384 true);
10385 rtx orig_op0;
10386
10387 /* ??? We should work harder and deal with non-zero offsets. */
10388 if (!offset
10389 && (bitpos % BITS_PER_UNIT) == 0
10390 && bitsize >= 0
10391 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10392 {
10393 /* See the normal_inner_ref case for the rationale. */
10394 orig_op0
10395 = expand_expr_real (tem,
10396 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10397 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10398 != INTEGER_CST)
10399 && modifier != EXPAND_STACK_PARM
10400 ? target : NULL_RTX),
10401 VOIDmode,
10402 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10403 NULL, true);
10404
10405 if (MEM_P (orig_op0))
10406 {
10407 op0 = orig_op0;
10408
10409 /* Get a reference to just this component. */
10410 if (modifier == EXPAND_CONST_ADDRESS
10411 || modifier == EXPAND_SUM
10412 || modifier == EXPAND_INITIALIZER)
10413 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10414 else
10415 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10416
10417 if (op0 == orig_op0)
10418 op0 = copy_rtx (op0);
10419
10420 set_mem_attributes (op0, treeop0, 0);
10421 if (REG_P (XEXP (op0, 0)))
10422 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10423
10424 MEM_VOLATILE_P (op0) |= volatilep;
10425 }
10426 }
10427 }
10428
10429 if (!op0)
10430 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10431 NULL, inner_reference_p);
10432
10433 /* If the input and output modes are both the same, we are done. */
10434 if (mode == GET_MODE (op0))
10435 ;
10436 /* If neither mode is BLKmode, and both modes are the same size
10437 then we can use gen_lowpart. */
10438 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10439 && (GET_MODE_PRECISION (mode)
10440 == GET_MODE_PRECISION (GET_MODE (op0)))
10441 && !COMPLEX_MODE_P (GET_MODE (op0)))
10442 {
10443 if (GET_CODE (op0) == SUBREG)
10444 op0 = force_reg (GET_MODE (op0), op0);
10445 temp = gen_lowpart_common (mode, op0);
10446 if (temp)
10447 op0 = temp;
10448 else
10449 {
10450 if (!REG_P (op0) && !MEM_P (op0))
10451 op0 = force_reg (GET_MODE (op0), op0);
10452 op0 = gen_lowpart (mode, op0);
10453 }
10454 }
10455 /* If both types are integral, convert from one mode to the other. */
10456 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10457 op0 = convert_modes (mode, GET_MODE (op0), op0,
10458 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10459 /* If the output type is a bit-field type, do an extraction. */
10460 else if (reduce_bit_field)
10461 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10462 TYPE_UNSIGNED (type), NULL_RTX,
10463 mode, mode);
10464 /* As a last resort, spill op0 to memory, and reload it in a
10465 different mode. */
10466 else if (!MEM_P (op0))
10467 {
10468 /* If the operand is not a MEM, force it into memory. Since we
10469 are going to be changing the mode of the MEM, don't call
10470 force_const_mem for constants because we don't allow pool
10471 constants to change mode. */
10472 tree inner_type = TREE_TYPE (treeop0);
10473
10474 gcc_assert (!TREE_ADDRESSABLE (exp));
10475
10476 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10477 target
10478 = assign_stack_temp_for_type
10479 (TYPE_MODE (inner_type),
10480 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10481
10482 emit_move_insn (target, op0);
10483 op0 = target;
10484 }
10485
10486 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10487 output type is such that the operand is known to be aligned, indicate
10488 that it is. Otherwise, we need only be concerned about alignment for
10489 non-BLKmode results. */
10490 if (MEM_P (op0))
10491 {
10492 enum insn_code icode;
10493
10494 if (TYPE_ALIGN_OK (type))
10495 {
10496 /* ??? Copying the MEM without substantially changing it might
10497 run afoul of the code handling volatile memory references in
10498 store_expr, which assumes that TARGET is returned unmodified
10499 if it has been used. */
10500 op0 = copy_rtx (op0);
10501 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10502 }
10503 else if (modifier != EXPAND_WRITE
10504 && modifier != EXPAND_MEMORY
10505 && !inner_reference_p
10506 && mode != BLKmode
10507 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10508 {
10509 /* If the target has special handling for unaligned
10510 loads of this mode, use it. */
10511 if ((icode = optab_handler (movmisalign_optab, mode))
10512 != CODE_FOR_nothing)
10513 {
10514 rtx reg;
10515
10516 op0 = adjust_address (op0, mode, 0);
10517 /* We've already validated the memory, and we're creating a
10518 new pseudo destination. The predicates really can't
10519 fail. */
10520 reg = gen_reg_rtx (mode);
10521
10522 /* Nor can the insn generator. */
10523 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10524 emit_insn (insn);
10525 return reg;
10526 }
10527 else if (STRICT_ALIGNMENT)
10528 {
10529 tree inner_type = TREE_TYPE (treeop0);
10530 HOST_WIDE_INT temp_size
10531 = MAX (int_size_in_bytes (inner_type),
10532 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10533 rtx new_rtx
10534 = assign_stack_temp_for_type (mode, temp_size, type);
10535 rtx new_with_op0_mode
10536 = adjust_address (new_rtx, GET_MODE (op0), 0);
10537
10538 gcc_assert (!TREE_ADDRESSABLE (exp));
10539
10540 if (GET_MODE (op0) == BLKmode)
10541 emit_block_move (new_with_op0_mode, op0,
10542 GEN_INT (GET_MODE_SIZE (mode)),
10543 (modifier == EXPAND_STACK_PARM
10544 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10545 else
10546 emit_move_insn (new_with_op0_mode, op0);
10547
10548 op0 = new_rtx;
10549 }
10550 }
10551
10552 op0 = adjust_address (op0, mode, 0);
10553 }
10554
10555 return op0;
10556
10557 case MODIFY_EXPR:
10558 {
10559 tree lhs = treeop0;
10560 tree rhs = treeop1;
10561 gcc_assert (ignore);
10562
10563 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10564 of size 1. In this case, (unless we need the result of the
10565 assignment) we can do this more efficiently with a
10566 test followed by an assignment, if necessary.
10567
10568 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10569 things change so we do, this code should be enhanced to
10570 support it. */
10571 if (TREE_CODE (lhs) == COMPONENT_REF
10572 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10573 || TREE_CODE (rhs) == BIT_AND_EXPR)
10574 && TREE_OPERAND (rhs, 0) == lhs
10575 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10576 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10577 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10578 {
10579 rtx_code_label *label = gen_label_rtx ();
10580 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10581 do_jump (TREE_OPERAND (rhs, 1),
10582 value ? label : 0,
10583 value ? 0 : label, -1);
10584 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10585 false);
10586 do_pending_stack_adjust ();
10587 emit_label (label);
10588 return const0_rtx;
10589 }
10590
10591 expand_assignment (lhs, rhs, false);
10592 return const0_rtx;
10593 }
10594
10595 case ADDR_EXPR:
10596 return expand_expr_addr_expr (exp, target, tmode, modifier);
10597
10598 case REALPART_EXPR:
10599 op0 = expand_normal (treeop0);
10600 return read_complex_part (op0, false);
10601
10602 case IMAGPART_EXPR:
10603 op0 = expand_normal (treeop0);
10604 return read_complex_part (op0, true);
10605
10606 case RETURN_EXPR:
10607 case LABEL_EXPR:
10608 case GOTO_EXPR:
10609 case SWITCH_EXPR:
10610 case ASM_EXPR:
10611 /* Expanded in cfgexpand.c. */
10612 gcc_unreachable ();
10613
10614 case TRY_CATCH_EXPR:
10615 case CATCH_EXPR:
10616 case EH_FILTER_EXPR:
10617 case TRY_FINALLY_EXPR:
10618 /* Lowered by tree-eh.c. */
10619 gcc_unreachable ();
10620
10621 case WITH_CLEANUP_EXPR:
10622 case CLEANUP_POINT_EXPR:
10623 case TARGET_EXPR:
10624 case CASE_LABEL_EXPR:
10625 case VA_ARG_EXPR:
10626 case BIND_EXPR:
10627 case INIT_EXPR:
10628 case CONJ_EXPR:
10629 case COMPOUND_EXPR:
10630 case PREINCREMENT_EXPR:
10631 case PREDECREMENT_EXPR:
10632 case POSTINCREMENT_EXPR:
10633 case POSTDECREMENT_EXPR:
10634 case LOOP_EXPR:
10635 case EXIT_EXPR:
10636 case COMPOUND_LITERAL_EXPR:
10637 /* Lowered by gimplify.c. */
10638 gcc_unreachable ();
10639
10640 case FDESC_EXPR:
10641 /* Function descriptors are not valid except as
10642 initialization constants, and should not be expanded. */
10643 gcc_unreachable ();
10644
10645 case WITH_SIZE_EXPR:
10646 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10647 have pulled out the size to use in whatever context it needed. */
10648 return expand_expr_real (treeop0, original_target, tmode,
10649 modifier, alt_rtl, inner_reference_p);
10650
10651 default:
10652 return expand_expr_real_2 (&ops, target, tmode, modifier);
10653 }
10654 }
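/* Illustrative sketch, not part of GCC: the "spill to memory and
   reload in a different mode" fallback used in the mode-changing code
   above, shown as the source-level equivalent of reinterpreting an
   object's bytes.  view_convert_example is a hypothetical name; the
   #if 0 guard keeps this sketch out of the build.  */
#if 0
#include <string.h>

static unsigned long long
view_convert_example (double d)
{
  unsigned long long bits;
  /* Store the value in its original mode, then reload the same bytes
     in the destination mode; the two sizes are assumed to be equal
     (8 bytes on typical hosts).  */
  memcpy (&bits, &d, sizeof bits);
  return bits;
}
#endif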
10655 \f
10656 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10657 signedness of TYPE), possibly returning the result in TARGET. */
10658 static rtx
10659 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10660 {
10661 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10662 if (target && GET_MODE (target) != GET_MODE (exp))
10663 target = 0;
10664 /* For constant values, reduce using build_int_cst_type. */
10665 if (CONST_INT_P (exp))
10666 {
10667 HOST_WIDE_INT value = INTVAL (exp);
10668 tree t = build_int_cst_type (type, value);
10669 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10670 }
10671 else if (TYPE_UNSIGNED (type))
10672 {
10673 machine_mode mode = GET_MODE (exp);
10674 rtx mask = immed_wide_int_const
10675 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10676 return expand_and (mode, exp, mask, target);
10677 }
10678 else
10679 {
10680 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10681 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10682 exp, count, target, 0);
10683 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10684 exp, count, target, 0);
10685 }
10686 }
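/* Illustrative sketch, not part of GCC: the two reductions performed
   by reduce_to_bit_field_precision, written on plain 32-bit host
   integers for an assumed precision PREC.  The function names are
   hypothetical and the #if 0 guard keeps the sketch out of the
   build.  */
#if 0
static unsigned int
reduce_unsigned_example (unsigned int x, int prec)
{
  /* Unsigned case: AND with a mask of the PREC low bits, like the
     wi::mask / expand_and path above.  */
  unsigned int mask = prec >= 32 ? ~0u : (1u << prec) - 1u;
  return x & mask;
}

static int
reduce_signed_example (int x, int prec)
{
  /* Signed case: shift the value up so bit PREC-1 becomes the sign
     bit, then shift it back down, like the LSHIFT_EXPR / RSHIFT_EXPR
     pair above.  The left shift is done in unsigned arithmetic to
     avoid signed overflow; the right shift relies on the usual
     arithmetic shift of negative values.  */
  int count = 32 - prec;
  return (int) ((unsigned int) x << count) >> count;
}
#endif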
10687 \f
10688 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10689 when applied to the address of EXP produces an address known to be
10690 aligned more than BIGGEST_ALIGNMENT. */
10691
10692 static int
10693 is_aligning_offset (const_tree offset, const_tree exp)
10694 {
10695 /* Strip off any conversions. */
10696 while (CONVERT_EXPR_P (offset))
10697 offset = TREE_OPERAND (offset, 0);
10698
10699 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10700 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10701 if (TREE_CODE (offset) != BIT_AND_EXPR
10702 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10703 || compare_tree_int (TREE_OPERAND (offset, 1),
10704 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10705 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10706 return 0;
10707
10708 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10709 It must be NEGATE_EXPR. Then strip any more conversions. */
10710 offset = TREE_OPERAND (offset, 0);
10711 while (CONVERT_EXPR_P (offset))
10712 offset = TREE_OPERAND (offset, 0);
10713
10714 if (TREE_CODE (offset) != NEGATE_EXPR)
10715 return 0;
10716
10717 offset = TREE_OPERAND (offset, 0);
10718 while (CONVERT_EXPR_P (offset))
10719 offset = TREE_OPERAND (offset, 0);
10720
10721 /* This must now be the address of EXP. */
10722 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10723 }
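/* Illustrative sketch, not part of GCC: the source-level idiom that
   is_aligning_offset recognizes.  Adding (-(uintptr_t) p) & (N - 1)
   to p rounds p up to the next multiple of N, so the result is known
   to be N-byte aligned.  align_up_example and N are hypothetical; the
   #if 0 guard keeps the sketch out of the build.  */
#if 0
#include <stdint.h>

static char *
align_up_example (char *p, uintptr_t n)
{
  /* N must be a power of 2 larger than BIGGEST_ALIGNMENT in bytes for
     the pattern above to be accepted.  */
  uintptr_t pad = (0 - (uintptr_t) p) & (n - 1);
  return p + pad;
}
#endif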
10724 \f
10725 /* Return the tree node if ARG corresponds to a string constant, or zero
10726 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10727 in bytes within the string that ARG is accessing. The type of the
10728 offset will be `sizetype'. */
10729
10730 tree
10731 string_constant (tree arg, tree *ptr_offset)
10732 {
10733 tree array, offset, lower_bound;
10734 STRIP_NOPS (arg);
10735
10736 if (TREE_CODE (arg) == ADDR_EXPR)
10737 {
10738 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10739 {
10740 *ptr_offset = size_zero_node;
10741 return TREE_OPERAND (arg, 0);
10742 }
10743 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10744 {
10745 array = TREE_OPERAND (arg, 0);
10746 offset = size_zero_node;
10747 }
10748 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10749 {
10750 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10751 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10752 if (TREE_CODE (array) != STRING_CST
10753 && TREE_CODE (array) != VAR_DECL)
10754 return 0;
10755
10756 /* Check if the array has a nonzero lower bound. */
10757 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10758 if (!integer_zerop (lower_bound))
10759 {
10760 /* If the offset and base aren't both constants, return 0. */
10761 if (TREE_CODE (lower_bound) != INTEGER_CST)
10762 return 0;
10763 if (TREE_CODE (offset) != INTEGER_CST)
10764 return 0;
10765 /* Adjust offset by the lower bound. */
10766 offset = size_diffop (fold_convert (sizetype, offset),
10767 fold_convert (sizetype, lower_bound));
10768 }
10769 }
10770 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10771 {
10772 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10773 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10774 if (TREE_CODE (array) != ADDR_EXPR)
10775 return 0;
10776 array = TREE_OPERAND (array, 0);
10777 if (TREE_CODE (array) != STRING_CST
10778 && TREE_CODE (array) != VAR_DECL)
10779 return 0;
10780 }
10781 else
10782 return 0;
10783 }
10784 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10785 {
10786 tree arg0 = TREE_OPERAND (arg, 0);
10787 tree arg1 = TREE_OPERAND (arg, 1);
10788
10789 STRIP_NOPS (arg0);
10790 STRIP_NOPS (arg1);
10791
10792 if (TREE_CODE (arg0) == ADDR_EXPR
10793 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10794 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10795 {
10796 array = TREE_OPERAND (arg0, 0);
10797 offset = arg1;
10798 }
10799 else if (TREE_CODE (arg1) == ADDR_EXPR
10800 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10801 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10802 {
10803 array = TREE_OPERAND (arg1, 0);
10804 offset = arg0;
10805 }
10806 else
10807 return 0;
10808 }
10809 else
10810 return 0;
10811
10812 if (TREE_CODE (array) == STRING_CST)
10813 {
10814 *ptr_offset = fold_convert (sizetype, offset);
10815 return array;
10816 }
10817 else if (TREE_CODE (array) == VAR_DECL
10818 || TREE_CODE (array) == CONST_DECL)
10819 {
10820 int length;
10821 tree init = ctor_for_folding (array);
10822
10823 /* Variables initialized to string literals can be handled too. */
10824 if (init == error_mark_node
10825 || !init
10826 || TREE_CODE (init) != STRING_CST)
10827 return 0;
10828
10829 /* Avoid const char foo[4] = "abcde"; */
10830 if (DECL_SIZE_UNIT (array) == NULL_TREE
10831 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10832 || (length = TREE_STRING_LENGTH (init)) <= 0
10833 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10834 return 0;
10835
10836 /* If the variable is bigger than the string literal, OFFSET must be constant
10837 and within the bounds of the string literal. */
10838 offset = fold_convert (sizetype, offset);
10839 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10840 && (! tree_fits_uhwi_p (offset)
10841 || compare_tree_int (offset, length) >= 0))
10842 return 0;
10843
10844 *ptr_offset = offset;
10845 return init;
10846 }
10847
10848 return 0;
10849 }
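/* Illustrative sketch, not part of GCC: the bounds check applied just
   above when ARRAY is a variable initialized from a string literal.
   DECL_SIZE is the object's size in bytes, LENGTH the literal's
   TREE_STRING_LENGTH, and OFFSET the byte offset (a negative OFFSET
   models "not a known constant").  The function name is hypothetical
   and the #if 0 guard keeps the sketch out of the build.  */
#if 0
#include <stdbool.h>

static bool
string_offset_ok_example (long decl_size, long length, long offset)
{
  /* Reject the "const char foo[4]" initialized from "abcde" case:
     the object is smaller than its initializer.  */
  if (length <= 0 || decl_size < length)
    return false;
  /* If the object is bigger than the literal, the offset must be a
     known constant inside the literal.  */
  if (decl_size > length && (offset < 0 || offset >= length))
    return false;
  return true;
}
#endif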
10850 \f
10851 /* Generate code to calculate OPS, an exploded expression,
10852 using a store-flag instruction, and return an rtx for the result.
10853 OPS reflects a comparison.
10854
10855 If TARGET is nonzero, store the result there if convenient.
10856
10857 Return zero if there is no suitable set-flag instruction
10858 available on this machine.
10859
10860 Once expand_expr has been called on the arguments of the comparison,
10861 we are committed to doing the store flag, since it is not safe to
10862 re-evaluate the expression. We emit the store-flag insn by calling
10863 emit_store_flag, but only expand the arguments if we have a reason
10864 to believe that emit_store_flag will be successful. If we think that
10865 it will, but it isn't, we have to simulate the store-flag with a
10866 set/jump/set sequence. */
10867
10868 static rtx
10869 do_store_flag (sepops ops, rtx target, machine_mode mode)
10870 {
10871 enum rtx_code code;
10872 tree arg0, arg1, type;
10873 machine_mode operand_mode;
10874 int unsignedp;
10875 rtx op0, op1;
10876 rtx subtarget = target;
10877 location_t loc = ops->location;
10878
10879 arg0 = ops->op0;
10880 arg1 = ops->op1;
10881
10882 /* Don't crash if the comparison was erroneous. */
10883 if (arg0 == error_mark_node || arg1 == error_mark_node)
10884 return const0_rtx;
10885
10886 type = TREE_TYPE (arg0);
10887 operand_mode = TYPE_MODE (type);
10888 unsignedp = TYPE_UNSIGNED (type);
10889
10890 /* We won't bother with BLKmode store-flag operations because it would mean
10891 passing a lot of information to emit_store_flag. */
10892 if (operand_mode == BLKmode)
10893 return 0;
10894
10895 /* We won't bother with store-flag operations involving function pointers
10896 when function pointers must be canonicalized before comparisons. */
10897 if (targetm.have_canonicalize_funcptr_for_compare ()
10898 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10899 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10900 == FUNCTION_TYPE))
10901 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10902 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10903 == FUNCTION_TYPE))))
10904 return 0;
10905
10906 STRIP_NOPS (arg0);
10907 STRIP_NOPS (arg1);
10908
10909 /* For vector typed comparisons emit code to generate the desired
10910 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10911 expander for this. */
10912 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10913 {
10914 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10915 tree if_true = constant_boolean_node (true, ops->type);
10916 tree if_false = constant_boolean_node (false, ops->type);
10917 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10918 }
10919
10920 /* Get the rtx comparison code to use. We know that EXP is a comparison
10921 operation of some type. Some comparisons against 1 and -1 can be
10922 converted to comparisons with zero. Do so here so that the tests
10923 below will be aware that we have a comparison with zero. These
10924 tests will not catch constants in the first operand, but constants
10925 are rarely passed as the first operand. */
10926
10927 switch (ops->code)
10928 {
10929 case EQ_EXPR:
10930 code = EQ;
10931 break;
10932 case NE_EXPR:
10933 code = NE;
10934 break;
10935 case LT_EXPR:
10936 if (integer_onep (arg1))
10937 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10938 else
10939 code = unsignedp ? LTU : LT;
10940 break;
10941 case LE_EXPR:
10942 if (! unsignedp && integer_all_onesp (arg1))
10943 arg1 = integer_zero_node, code = LT;
10944 else
10945 code = unsignedp ? LEU : LE;
10946 break;
10947 case GT_EXPR:
10948 if (! unsignedp && integer_all_onesp (arg1))
10949 arg1 = integer_zero_node, code = GE;
10950 else
10951 code = unsignedp ? GTU : GT;
10952 break;
10953 case GE_EXPR:
10954 if (integer_onep (arg1))
10955 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10956 else
10957 code = unsignedp ? GEU : GE;
10958 break;
10959
10960 case UNORDERED_EXPR:
10961 code = UNORDERED;
10962 break;
10963 case ORDERED_EXPR:
10964 code = ORDERED;
10965 break;
10966 case UNLT_EXPR:
10967 code = UNLT;
10968 break;
10969 case UNLE_EXPR:
10970 code = UNLE;
10971 break;
10972 case UNGT_EXPR:
10973 code = UNGT;
10974 break;
10975 case UNGE_EXPR:
10976 code = UNGE;
10977 break;
10978 case UNEQ_EXPR:
10979 code = UNEQ;
10980 break;
10981 case LTGT_EXPR:
10982 code = LTGT;
10983 break;
10984
10985 default:
10986 gcc_unreachable ();
10987 }
10988
10989 /* Put a constant second. */
10990 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10991 || TREE_CODE (arg0) == FIXED_CST)
10992 {
10993 std::swap (arg0, arg1);
10994 code = swap_condition (code);
10995 }
10996
10997 /* If this is an equality or inequality test of a single bit, we can
10998 do this by shifting the bit being tested to the low-order bit and
10999 masking the result with the constant 1. If the condition was EQ,
11000 we xor it with 1. This does not require an scc insn and is faster
11001 than an scc insn even if we have it.
11002
11003 The code to make this transformation was moved into fold_single_bit_test,
11004 so we just call into the folder and expand its result. */
11005
11006 if ((code == NE || code == EQ)
11007 && integer_zerop (arg1)
11008 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11009 {
11010 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11011 if (srcstmt
11012 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11013 {
11014 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11015 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11016 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11017 gimple_assign_rhs1 (srcstmt),
11018 gimple_assign_rhs2 (srcstmt));
11019 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11020 if (temp)
11021 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11022 }
11023 }
11024
11025 if (! get_subtarget (target)
11026 || GET_MODE (subtarget) != operand_mode)
11027 subtarget = 0;
11028
11029 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11030
11031 if (target == 0)
11032 target = gen_reg_rtx (mode);
11033
11034 /* Try a cstore if possible. */
11035 return emit_store_flag_force (target, code, op0, op1,
11036 operand_mode, unsignedp,
11037 (TYPE_PRECISION (ops->type) == 1
11038 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11039 }
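/* Illustrative sketch, not part of GCC: the single-bit transformation
   delegated to fold_single_bit_test above.  For a power-of-two mask
   1 << K, (X & (1 << K)) != 0 becomes (X >> K) & 1, and the EQ form
   additionally XORs the result with 1.  The function names are
   hypothetical; the #if 0 guard keeps the sketch out of the build.  */
#if 0
static unsigned int
single_bit_ne_example (unsigned int x, unsigned int k)
{
  return (x >> k) & 1u;               /* (x & (1u << k)) != 0  */
}

static unsigned int
single_bit_eq_example (unsigned int x, unsigned int k)
{
  return ((x >> k) & 1u) ^ 1u;        /* (x & (1u << k)) == 0  */
}
#endif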
11040 \f
11041 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11042 0 otherwise (i.e. if there is no casesi instruction).
11043
11044 DEFAULT_PROBABILITY is the probability of jumping to the default
11045 label. */
11046 int
11047 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11048 rtx table_label, rtx default_label, rtx fallback_label,
11049 int default_probability)
11050 {
11051 struct expand_operand ops[5];
11052 machine_mode index_mode = SImode;
11053 rtx op1, op2, index;
11054
11055 if (! targetm.have_casesi ())
11056 return 0;
11057
11058 /* Convert the index to SImode. */
11059 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11060 {
11061 machine_mode omode = TYPE_MODE (index_type);
11062 rtx rangertx = expand_normal (range);
11063
11064 /* We must handle the endpoints in the original mode. */
11065 index_expr = build2 (MINUS_EXPR, index_type,
11066 index_expr, minval);
11067 minval = integer_zero_node;
11068 index = expand_normal (index_expr);
11069 if (default_label)
11070 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11071 omode, 1, default_label,
11072 default_probability);
11073 /* Now we can safely truncate. */
11074 index = convert_to_mode (index_mode, index, 0);
11075 }
11076 else
11077 {
11078 if (TYPE_MODE (index_type) != index_mode)
11079 {
11080 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11081 index_expr = fold_convert (index_type, index_expr);
11082 }
11083
11084 index = expand_normal (index_expr);
11085 }
11086
11087 do_pending_stack_adjust ();
11088
11089 op1 = expand_normal (minval);
11090 op2 = expand_normal (range);
11091
11092 create_input_operand (&ops[0], index, index_mode);
11093 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11094 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11095 create_fixed_operand (&ops[3], table_label);
11096 create_fixed_operand (&ops[4], (default_label
11097 ? default_label
11098 : fallback_label));
11099 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11100 return 1;
11101 }
11102
11103 /* Attempt to generate a tablejump instruction; same concept. */
11104 /* Subroutine of the next function.
11105
11106 INDEX is the value being switched on, with the lowest value
11107 in the table already subtracted.
11108 MODE is its expected mode (needed if INDEX is constant).
11109 RANGE is the length of the jump table.
11110 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11111
11112 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11113 index value is out of range.
11114 DEFAULT_PROBABILITY is the probability of jumping to
11115 the default label. */
11116
11117 static void
11118 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11119 rtx default_label, int default_probability)
11120 {
11121 rtx temp, vector;
11122
11123 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11124 cfun->cfg->max_jumptable_ents = INTVAL (range);
11125
11126 /* Do an unsigned comparison (in the proper mode) between the index
11127 expression and the value which represents the length of the range.
11128 Since we just finished subtracting the lower bound of the range
11129 from the index expression, this comparison allows us to simultaneously
11130 check that the original index expression value is both greater than
11131 or equal to the minimum value of the range and less than or equal to
11132 the maximum value of the range. */
11133
11134 if (default_label)
11135 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11136 default_label, default_probability);
11137
11138
11139 /* If index is in range, it must fit in Pmode.
11140 Convert to Pmode so we can index with it. */
11141 if (mode != Pmode)
11142 index = convert_to_mode (Pmode, index, 1);
11143
11144 /* Don't let a MEM slip through, because then INDEX that comes
11145 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11146 and break_out_memory_refs will go to work on it and mess it up. */
11147 #ifdef PIC_CASE_VECTOR_ADDRESS
11148 if (flag_pic && !REG_P (index))
11149 index = copy_to_mode_reg (Pmode, index);
11150 #endif
11151
11152 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11153 GET_MODE_SIZE, because this indicates how large insns are. The other
11154 uses should all be Pmode, because they are addresses. This code
11155 could fail if addresses and insns are not the same size. */
11156 index = simplify_gen_binary (MULT, Pmode, index,
11157 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11158 Pmode));
11159 index = simplify_gen_binary (PLUS, Pmode, index,
11160 gen_rtx_LABEL_REF (Pmode, table_label));
11161
11162 #ifdef PIC_CASE_VECTOR_ADDRESS
11163 if (flag_pic)
11164 index = PIC_CASE_VECTOR_ADDRESS (index);
11165 else
11166 #endif
11167 index = memory_address (CASE_VECTOR_MODE, index);
11168 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11169 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11170 convert_move (temp, vector, 0);
11171
11172 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11173
11174 /* If we are generating PIC code or if the table is PC-relative, the
11175 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11176 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11177 emit_barrier ();
11178 }
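/* Illustrative sketch, not part of GCC: the dispatch pattern that
   try_casesi and do_tablejump implement, written with a hypothetical
   handler table.  Biasing the value by MINVAL and doing one unsigned
   comparison against RANGE checks both bounds at once, as the comment
   in do_tablejump explains.  The #if 0 guard keeps the sketch out of
   the build.  */
#if 0
static void
tablejump_example (int value, int minval, unsigned int range,
                   void (*const table[]) (void), void (*dflt) (void))
{
  unsigned int index = (unsigned int) value - (unsigned int) minval;

  if (index > range)        /* value < minval or value > minval + range */
    dflt ();                /* default label */
  else
    table[index] ();        /* indexed jump through the table */
}
#endif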
11179
11180 int
11181 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11182 rtx table_label, rtx default_label, int default_probability)
11183 {
11184 rtx index;
11185
11186 if (! targetm.have_tablejump ())
11187 return 0;
11188
11189 index_expr = fold_build2 (MINUS_EXPR, index_type,
11190 fold_convert (index_type, index_expr),
11191 fold_convert (index_type, minval));
11192 index = expand_normal (index_expr);
11193 do_pending_stack_adjust ();
11194
11195 do_tablejump (index, TYPE_MODE (index_type),
11196 convert_modes (TYPE_MODE (index_type),
11197 TYPE_MODE (TREE_TYPE (range)),
11198 expand_normal (range),
11199 TYPE_UNSIGNED (TREE_TYPE (range))),
11200 table_label, default_label, default_probability);
11201 return 1;
11202 }
11203
11204 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11205 static rtx
11206 const_vector_from_tree (tree exp)
11207 {
11208 rtvec v;
11209 unsigned i;
11210 int units;
11211 tree elt;
11212 machine_mode inner, mode;
11213
11214 mode = TYPE_MODE (TREE_TYPE (exp));
11215
11216 if (initializer_zerop (exp))
11217 return CONST0_RTX (mode);
11218
11219 units = GET_MODE_NUNITS (mode);
11220 inner = GET_MODE_INNER (mode);
11221
11222 v = rtvec_alloc (units);
11223
11224 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11225 {
11226 elt = VECTOR_CST_ELT (exp, i);
11227
11228 if (TREE_CODE (elt) == REAL_CST)
11229 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11230 inner);
11231 else if (TREE_CODE (elt) == FIXED_CST)
11232 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11233 inner);
11234 else
11235 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11236 }
11237
11238 return gen_rtx_CONST_VECTOR (mode, v);
11239 }
11240
11241 /* Build a decl for a personality function given a language prefix. */
11242
11243 tree
11244 build_personality_function (const char *lang)
11245 {
11246 const char *unwind_and_version;
11247 tree decl, type;
11248 char *name;
11249
11250 switch (targetm_common.except_unwind_info (&global_options))
11251 {
11252 case UI_NONE:
11253 return NULL;
11254 case UI_SJLJ:
11255 unwind_and_version = "_sj0";
11256 break;
11257 case UI_DWARF2:
11258 case UI_TARGET:
11259 unwind_and_version = "_v0";
11260 break;
11261 case UI_SEH:
11262 unwind_and_version = "_seh0";
11263 break;
11264 default:
11265 gcc_unreachable ();
11266 }
11267
11268 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11269
11270 type = build_function_type_list (integer_type_node, integer_type_node,
11271 long_long_unsigned_type_node,
11272 ptr_type_node, ptr_type_node, NULL_TREE);
11273 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11274 get_identifier (name), type);
11275 DECL_ARTIFICIAL (decl) = 1;
11276 DECL_EXTERNAL (decl) = 1;
11277 TREE_PUBLIC (decl) = 1;
11278
11279 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11280 are the flags assigned by targetm.encode_section_info. */
11281 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11282
11283 return decl;
11284 }
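/* Illustrative sketch, not part of GCC: how the symbol name assembled
   by ACONCAT above is formed.  For lang "gxx" the result is
   "__gxx_personality_v0" with DWARF2 unwinding and
   "__gxx_personality_sj0" with SJLJ.  The function name is
   hypothetical; the #if 0 guard keeps the sketch out of the build.  */
#if 0
#include <stdio.h>

static void
personality_name_example (char *buf, size_t len, const char *lang,
                          const char *unwind_and_version)
{
  /* Mirrors ACONCAT (("__", lang, "_personality", unwind_and_version,
     NULL)).  */
  snprintf (buf, len, "__%s_personality%s", lang, unwind_and_version);
}
#endif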
11285
11286 /* Extracts the personality function of DECL and returns the corresponding
11287 libfunc. */
11288
11289 rtx
11290 get_personality_function (tree decl)
11291 {
11292 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11293 enum eh_personality_kind pk;
11294
11295 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11296 if (pk == eh_personality_none)
11297 return NULL;
11298
11299 if (!personality
11300 && pk == eh_personality_any)
11301 personality = lang_hooks.eh_personality ();
11302
11303 if (pk == eh_personality_lang)
11304 gcc_assert (personality != NULL_TREE);
11305
11306 return XEXP (DECL_RTL (personality), 0);
11307 }
11308
11309 /* Returns a tree for the size of EXP in bytes. */
11310
11311 static tree
11312 tree_expr_size (const_tree exp)
11313 {
11314 if (DECL_P (exp)
11315 && DECL_SIZE_UNIT (exp) != 0)
11316 return DECL_SIZE_UNIT (exp);
11317 else
11318 return size_in_bytes (TREE_TYPE (exp));
11319 }
11320
11321 /* Return an rtx for the size in bytes of the value of EXP. */
11322
11323 rtx
11324 expr_size (tree exp)
11325 {
11326 tree size;
11327
11328 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11329 size = TREE_OPERAND (exp, 1);
11330 else
11331 {
11332 size = tree_expr_size (exp);
11333 gcc_assert (size);
11334 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11335 }
11336
11337 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11338 }
11339
11340 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11341 if the size can vary or is larger than an integer. */
11342
11343 static HOST_WIDE_INT
11344 int_expr_size (tree exp)
11345 {
11346 tree size;
11347
11348 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11349 size = TREE_OPERAND (exp, 1);
11350 else
11351 {
11352 size = tree_expr_size (exp);
11353 gcc_assert (size);
11354 }
11355
11356 if (size == 0 || !tree_fits_shwi_p (size))
11357 return -1;
11358
11359 return tree_to_shwi (size);
11360 }
11361
11362 #include "gt-expr.h"