]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/expr.c
alias.h (alias_set_type): Define new type.
[thirdparty/gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
62
63 #ifdef PUSH_ROUNDING
64
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
70
71 #endif
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
109
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
146
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
148
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* Record for each mode whether we can float-extend from memory. */
169
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
171
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
179
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
187
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
196
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
199
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
202
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
208
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
232
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
234
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
238 \f
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
241
242 void
243 init_expr_once (void)
244 {
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
250
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
256
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
260
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
264
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
267 {
268 int regno;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
274
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
277
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
282 {
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
285
286 SET_REGNO (reg, regno);
287
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
292
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
297
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
302
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
307 }
308 }
309
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
311
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
314 {
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
318 {
319 enum insn_code ic;
320
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
324
325 PUT_MODE (mem, srcmode);
326
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
329 }
330 }
331 }
332
333 /* This is run at the start of compiling a function. */
334
335 void
336 init_expr (void)
337 {
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
339 }
340 \f
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
345
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
348 {
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
355
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
359
360
361 gcc_assert (to_real == from_real);
362 gcc_assert (to_mode != BLKmode);
363 gcc_assert (from_mode != BLKmode);
364
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
409 if (to_real)
410 {
411 rtx value, insns;
412 convert_optab tab;
413
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
418
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
426
427 /* Try converting directly if the insn is supported. */
428
429 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
430 if (code != CODE_FOR_nothing)
431 {
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
435 }
436
437 /* Otherwise use a libcall. */
438 libcall = convert_optab_handler (tab, to_mode, from_mode)->libfunc;
439
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
442
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
453 }
454
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 {
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462
463 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
464 != CODE_FOR_nothing);
465
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
469 to, from, UNKNOWN);
470 return;
471 }
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 {
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477
478 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
479 != CODE_FOR_nothing);
480
481 if (to_mode == full_mode)
482 {
483 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
484 to, from, UNKNOWN);
485 return;
486 }
487
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
490 new_from, from, UNKNOWN);
491
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
495 }
496
497 /* Now both modes are integers. */
498
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 {
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
514 {
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
523 }
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
528 {
529 if (REG_P (to))
530 {
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 }
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
539 }
540
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
543
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
546
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
549
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
555
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
560
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
565 {
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
570 {
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
575 }
576 else
577 #endif
578 {
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
584 }
585 }
586
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 {
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
592
593 gcc_assert (subword);
594
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
597 }
598
599 insns = get_insns ();
600 end_sequence ();
601
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
605 }
606
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610 {
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
620 }
621
622 /* Now follow all the conversions between integers
623 no more than a word long. */
624
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
629 {
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
642 }
643
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 {
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
650 {
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
653 }
654 else
655 {
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
659
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
670 {
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
674 }
675
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
689 }
690 }
691
692 /* Support special truncate insns for certain modes. */
693 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
694 {
695 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
696 to, from, UNKNOWN);
697 return;
698 }
699
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
703
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 {
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
712 }
713
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
716 }
717
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
724
725 rtx
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 {
728 return convert_modes (mode, VOIDmode, x, unsignedp);
729 }
730
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
735
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
738
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740
741 rtx
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 {
744 rtx temp;
745
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
748
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
753
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
756
757 if (mode == oldmode)
758 return x;
759
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
765
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769 {
770 HOST_WIDE_INT val = INTVAL (x);
771
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 {
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 }
780
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 }
783
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
788
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
802 {
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 {
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
811
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
818
819 return gen_int_mode (val, mode);
820 }
821
822 return gen_lowpart (mode, x);
823 }
824
825 /* Converting from integer constant into mode is always equivalent to an
826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 {
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
831 }
832
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
836 }
837 \f
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
842
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
848
849 int
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
852 {
853 return MOVE_BY_PIECES_P (len, align);
854 }
855
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
858
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
861
862 ALIGN is maximum stack alignment we can assume.
863
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 stpcpy. */
867
868 rtx
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
871 {
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
877
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
883 {
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 }
892 else
893 {
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
902 }
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 {
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 {
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
932 }
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 {
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
938 }
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 {
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 {
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
955 }
956
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
961 {
962 enum machine_mode xmode;
963
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
970
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 }
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1)
978 {
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
983
984 if (mode == VOIDmode)
985 break;
986
987 icode = optab_handler (mov_optab, mode)->insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990
991 max_size = GET_MODE_SIZE (mode);
992 }
993
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
996
997 if (endp)
998 {
999 rtx to1;
1000
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1003 {
1004 if (endp == 2)
1005 {
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1011 }
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1014 }
1015 else
1016 {
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1020 }
1021 return to1;
1022 }
1023 else
1024 return data.to;
1025 }
1026
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1029
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1033 {
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1036
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1041 {
1042 enum machine_mode tmode, xmode;
1043
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1050
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 }
1053
1054 while (max_size > 1)
1055 {
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1058
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1063
1064 if (mode == VOIDmode)
1065 break;
1066
1067 icode = optab_handler (mov_optab, mode)->insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070
1071 max_size = GET_MODE_SIZE (mode);
1072 }
1073
1074 gcc_assert (!l);
1075 return n_insns;
1076 }
1077
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1081
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1085 {
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1088
1089 while (data->len >= size)
1090 {
1091 if (data->reverse)
1092 data->offset -= size;
1093
1094 if (data->to)
1095 {
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1101 }
1102
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1108
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1115
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1119 {
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1125 }
1126
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131
1132 if (! data->reverse)
1133 data->offset += size;
1134
1135 data->len -= size;
1136 }
1137 }
1138 \f
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1142
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1147
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1150
1151 rtx
1152 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1153 unsigned int expected_align, HOST_WIDE_INT expected_size)
1154 {
1155 bool may_use_call;
1156 rtx retval = 0;
1157 unsigned int align;
1158
1159 switch (method)
1160 {
1161 case BLOCK_OP_NORMAL:
1162 case BLOCK_OP_TAILCALL:
1163 may_use_call = true;
1164 break;
1165
1166 case BLOCK_OP_CALL_PARM:
1167 may_use_call = block_move_libcall_safe_for_call_parm ();
1168
1169 /* Make inhibit_defer_pop nonzero around the library call
1170 to force it to pop the arguments right away. */
1171 NO_DEFER_POP;
1172 break;
1173
1174 case BLOCK_OP_NO_LIBCALL:
1175 may_use_call = false;
1176 break;
1177
1178 default:
1179 gcc_unreachable ();
1180 }
1181
1182 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1183
1184 gcc_assert (MEM_P (x));
1185 gcc_assert (MEM_P (y));
1186 gcc_assert (size);
1187
1188 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1189 block copy is more efficient for other large modes, e.g. DCmode. */
1190 x = adjust_address (x, BLKmode, 0);
1191 y = adjust_address (y, BLKmode, 0);
1192
1193 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1194 can be incorrect is coming from __builtin_memcpy. */
1195 if (GET_CODE (size) == CONST_INT)
1196 {
1197 if (INTVAL (size) == 0)
1198 return 0;
1199
1200 x = shallow_copy_rtx (x);
1201 y = shallow_copy_rtx (y);
1202 set_mem_size (x, size);
1203 set_mem_size (y, size);
1204 }
1205
1206 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1207 move_by_pieces (x, y, INTVAL (size), align, 0);
1208 else if (emit_block_move_via_movmem (x, y, size, align,
1209 expected_align, expected_size))
1210 ;
1211 else if (may_use_call)
1212 retval = emit_block_move_via_libcall (x, y, size,
1213 method == BLOCK_OP_TAILCALL);
1214 else
1215 emit_block_move_via_loop (x, y, size, align);
1216
1217 if (method == BLOCK_OP_CALL_PARM)
1218 OK_DEFER_POP;
1219
1220 return retval;
1221 }
1222
1223 rtx
1224 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1225 {
1226 return emit_block_move_hints (x, y, size, method, 0, -1);
1227 }
1228
1229 /* A subroutine of emit_block_move. Returns true if calling the
1230 block move libcall will not clobber any parameters which may have
1231 already been placed on the stack. */
1232
1233 static bool
1234 block_move_libcall_safe_for_call_parm (void)
1235 {
1236 /* If arguments are pushed on the stack, then they're safe. */
1237 if (PUSH_ARGS)
1238 return true;
1239
1240 /* If registers go on the stack anyway, any argument is sure to clobber
1241 an outgoing argument. */
1242 #if defined (REG_PARM_STACK_SPACE)
1243 if (OUTGOING_REG_PARM_STACK_SPACE)
1244 {
1245 tree fn;
1246 fn = emit_block_move_libcall_fn (false);
1247 if (REG_PARM_STACK_SPACE (fn) != 0)
1248 return false;
1249 }
1250 #endif
1251
1252 /* If any argument goes in memory, then it might clobber an outgoing
1253 argument. */
1254 {
1255 CUMULATIVE_ARGS args_so_far;
1256 tree fn, arg;
1257
1258 fn = emit_block_move_libcall_fn (false);
1259 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1260
1261 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1262 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1263 {
1264 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1265 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1266 if (!tmp || !REG_P (tmp))
1267 return false;
1268 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1269 return false;
1270 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1271 }
1272 }
1273 return true;
1274 }
1275
1276 /* A subroutine of emit_block_move. Expand a movmem pattern;
1277 return true if successful. */
1278
1279 static bool
1280 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1281 unsigned int expected_align, HOST_WIDE_INT expected_size)
1282 {
1283 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1284 int save_volatile_ok = volatile_ok;
1285 enum machine_mode mode;
1286
1287 if (expected_align < align)
1288 expected_align = align;
1289
1290 /* Since this is a move insn, we don't care about volatility. */
1291 volatile_ok = 1;
1292
1293 /* Try the most limited insn first, because there's no point
1294 including more than one in the machine description unless
1295 the more limited one has some advantage. */
1296
1297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1298 mode = GET_MODE_WIDER_MODE (mode))
1299 {
1300 enum insn_code code = movmem_optab[(int) mode];
1301 insn_operand_predicate_fn pred;
1302
1303 if (code != CODE_FOR_nothing
1304 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1305 here because if SIZE is less than the mode mask, as it is
1306 returned by the macro, it will definitely be less than the
1307 actual mode mask. */
1308 && ((GET_CODE (size) == CONST_INT
1309 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1310 <= (GET_MODE_MASK (mode) >> 1)))
1311 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1312 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1313 || (*pred) (x, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1315 || (*pred) (y, BLKmode))
1316 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1317 || (*pred) (opalign, VOIDmode)))
1318 {
1319 rtx op2;
1320 rtx last = get_last_insn ();
1321 rtx pat;
1322
1323 op2 = convert_to_mode (mode, size, 1);
1324 pred = insn_data[(int) code].operand[2].predicate;
1325 if (pred != 0 && ! (*pred) (op2, mode))
1326 op2 = copy_to_mode_reg (mode, op2);
1327
1328 /* ??? When called via emit_block_move_for_call, it'd be
1329 nice if there were some way to inform the backend, so
1330 that it doesn't fail the expansion because it thinks
1331 emitting the libcall would be more efficient. */
1332
1333 if (insn_data[(int) code].n_operands == 4)
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1335 else
1336 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1337 GEN_INT (expected_align),
1338 GEN_INT (expected_size));
1339 if (pat)
1340 {
1341 emit_insn (pat);
1342 volatile_ok = save_volatile_ok;
1343 return true;
1344 }
1345 else
1346 delete_insns_since (last);
1347 }
1348 }
1349
1350 volatile_ok = save_volatile_ok;
1351 return false;
1352 }
1353
1354 /* A subroutine of emit_block_move. Expand a call to memcpy.
1355 Return the return value from memcpy, 0 otherwise. */
1356
1357 rtx
1358 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1359 {
1360 rtx dst_addr, src_addr;
1361 tree call_expr, fn, src_tree, dst_tree, size_tree;
1362 enum machine_mode size_mode;
1363 rtx retval;
1364
1365 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1366 pseudos. We can then place those new pseudos into a VAR_DECL and
1367 use them later. */
1368
1369 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1370 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1371
1372 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1373 src_addr = convert_memory_address (ptr_mode, src_addr);
1374
1375 dst_tree = make_tree (ptr_type_node, dst_addr);
1376 src_tree = make_tree (ptr_type_node, src_addr);
1377
1378 size_mode = TYPE_MODE (sizetype);
1379
1380 size = convert_to_mode (size_mode, size, 1);
1381 size = copy_to_mode_reg (size_mode, size);
1382
1383 /* It is incorrect to use the libcall calling conventions to call
1384 memcpy in this context. This could be a user call to memcpy and
1385 the user may wish to examine the return value from memcpy. For
1386 targets where libcalls and normal calls have different conventions
1387 for returning pointers, we could end up generating incorrect code. */
1388
1389 size_tree = make_tree (sizetype, size);
1390
1391 fn = emit_block_move_libcall_fn (true);
1392 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1393 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1394
1395 retval = expand_normal (call_expr);
1396
1397 return retval;
1398 }
1399
1400 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1401 for the function we use for block copies. The first time FOR_CALL
1402 is true, we call assemble_external. */
1403
1404 static GTY(()) tree block_move_fn;
1405
1406 void
1407 init_block_move_fn (const char *asmspec)
1408 {
1409 if (!block_move_fn)
1410 {
1411 tree args, fn;
1412
1413 fn = get_identifier ("memcpy");
1414 args = build_function_type_list (ptr_type_node, ptr_type_node,
1415 const_ptr_type_node, sizetype,
1416 NULL_TREE);
1417
1418 fn = build_decl (FUNCTION_DECL, fn, args);
1419 DECL_EXTERNAL (fn) = 1;
1420 TREE_PUBLIC (fn) = 1;
1421 DECL_ARTIFICIAL (fn) = 1;
1422 TREE_NOTHROW (fn) = 1;
1423 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1424 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1425
1426 block_move_fn = fn;
1427 }
1428
1429 if (asmspec)
1430 set_user_assembler_name (block_move_fn, asmspec);
1431 }
1432
1433 static tree
1434 emit_block_move_libcall_fn (int for_call)
1435 {
1436 static bool emitted_extern;
1437
1438 if (!block_move_fn)
1439 init_block_move_fn (NULL);
1440
1441 if (for_call && !emitted_extern)
1442 {
1443 emitted_extern = true;
1444 make_decl_rtl (block_move_fn);
1445 assemble_external (block_move_fn);
1446 }
1447
1448 return block_move_fn;
1449 }
1450
1451 /* A subroutine of emit_block_move. Copy the data via an explicit
1452 loop. This is used only when libcalls are forbidden. */
1453 /* ??? It'd be nice to copy in hunks larger than QImode. */
1454
1455 static void
1456 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1457 unsigned int align ATTRIBUTE_UNUSED)
1458 {
1459 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1460 enum machine_mode iter_mode;
1461
1462 iter_mode = GET_MODE (size);
1463 if (iter_mode == VOIDmode)
1464 iter_mode = word_mode;
1465
1466 top_label = gen_label_rtx ();
1467 cmp_label = gen_label_rtx ();
1468 iter = gen_reg_rtx (iter_mode);
1469
1470 emit_move_insn (iter, const0_rtx);
1471
1472 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1473 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1474 do_pending_stack_adjust ();
1475
1476 emit_jump (cmp_label);
1477 emit_label (top_label);
1478
1479 tmp = convert_modes (Pmode, iter_mode, iter, true);
1480 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1481 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1484
1485 emit_move_insn (x, y);
1486
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1491
1492 emit_label (cmp_label);
1493
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label);
1496 }
1497 \f
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1500
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1503 {
1504 int i;
1505 #ifdef HAVE_load_multiple
1506 rtx pat;
1507 rtx last;
1508 #endif
1509
1510 if (nregs == 0)
1511 return;
1512
1513 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1514 x = validize_mem (force_const_mem (mode, x));
1515
1516 /* See if the machine can do this with a load multiple insn. */
1517 #ifdef HAVE_load_multiple
1518 if (HAVE_load_multiple)
1519 {
1520 last = get_last_insn ();
1521 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1522 GEN_INT (nregs));
1523 if (pat)
1524 {
1525 emit_insn (pat);
1526 return;
1527 }
1528 else
1529 delete_insns_since (last);
1530 }
1531 #endif
1532
1533 for (i = 0; i < nregs; i++)
1534 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1535 operand_subword_force (x, i, mode));
1536 }
1537
1538 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1540
1541 void
1542 move_block_from_reg (int regno, rtx x, int nregs)
1543 {
1544 int i;
1545
1546 if (nregs == 0)
1547 return;
1548
1549 /* See if the machine can do this with a store multiple insn. */
1550 #ifdef HAVE_store_multiple
1551 if (HAVE_store_multiple)
1552 {
1553 rtx last = get_last_insn ();
1554 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1555 GEN_INT (nregs));
1556 if (pat)
1557 {
1558 emit_insn (pat);
1559 return;
1560 }
1561 else
1562 delete_insns_since (last);
1563 }
1564 #endif
1565
1566 for (i = 0; i < nregs; i++)
1567 {
1568 rtx tem = operand_subword (x, i, 1, BLKmode);
1569
1570 gcc_assert (tem);
1571
1572 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1573 }
1574 }
1575
1576 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1577 ORIG, where ORIG is a non-consecutive group of registers represented by
1578 a PARALLEL. The clone is identical to the original except in that the
1579 original set of registers is replaced by a new set of pseudo registers.
1580 The new set has the same modes as the original set. */
1581
1582 rtx
1583 gen_group_rtx (rtx orig)
1584 {
1585 int i, length;
1586 rtx *tmps;
1587
1588 gcc_assert (GET_CODE (orig) == PARALLEL);
1589
1590 length = XVECLEN (orig, 0);
1591 tmps = alloca (sizeof (rtx) * length);
1592
1593 /* Skip a NULL entry in first slot. */
1594 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1595
1596 if (i)
1597 tmps[0] = 0;
1598
1599 for (; i < length; i++)
1600 {
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1602 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1603
1604 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1605 }
1606
1607 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1608 }
1609
1610 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1611 except that values are placed in TMPS[i], and must later be moved
1612 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1613
1614 static void
1615 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1616 {
1617 rtx src;
1618 int start, i;
1619 enum machine_mode m = GET_MODE (orig_src);
1620
1621 gcc_assert (GET_CODE (dst) == PARALLEL);
1622
1623 if (m != VOIDmode
1624 && !SCALAR_INT_MODE_P (m)
1625 && !MEM_P (orig_src)
1626 && GET_CODE (orig_src) != CONCAT)
1627 {
1628 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1629 if (imode == BLKmode)
1630 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1631 else
1632 src = gen_reg_rtx (imode);
1633 if (imode != BLKmode)
1634 src = gen_lowpart (GET_MODE (orig_src), src);
1635 emit_move_insn (src, orig_src);
1636 /* ...and back again. */
1637 if (imode != BLKmode)
1638 src = gen_lowpart (imode, src);
1639 emit_group_load_1 (tmps, dst, src, type, ssize);
1640 return;
1641 }
1642
1643 /* Check for a NULL entry, used to indicate that the parameter goes
1644 both on the stack and in registers. */
1645 if (XEXP (XVECEXP (dst, 0, 0), 0))
1646 start = 0;
1647 else
1648 start = 1;
1649
1650 /* Process the pieces. */
1651 for (i = start; i < XVECLEN (dst, 0); i++)
1652 {
1653 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1654 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1655 unsigned int bytelen = GET_MODE_SIZE (mode);
1656 int shift = 0;
1657
1658 /* Handle trailing fragments that run over the size of the struct. */
1659 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1660 {
1661 /* Arrange to shift the fragment to where it belongs.
1662 extract_bit_field loads to the lsb of the reg. */
1663 if (
1664 #ifdef BLOCK_REG_PADDING
1665 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1666 == (BYTES_BIG_ENDIAN ? upward : downward)
1667 #else
1668 BYTES_BIG_ENDIAN
1669 #endif
1670 )
1671 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1672 bytelen = ssize - bytepos;
1673 gcc_assert (bytelen > 0);
1674 }
1675
1676 /* If we won't be loading directly from memory, protect the real source
1677 from strange tricks we might play; but make sure that the source can
1678 be loaded directly into the destination. */
1679 src = orig_src;
1680 if (!MEM_P (orig_src)
1681 && (!CONSTANT_P (orig_src)
1682 || (GET_MODE (orig_src) != mode
1683 && GET_MODE (orig_src) != VOIDmode)))
1684 {
1685 if (GET_MODE (orig_src) == VOIDmode)
1686 src = gen_reg_rtx (mode);
1687 else
1688 src = gen_reg_rtx (GET_MODE (orig_src));
1689
1690 emit_move_insn (src, orig_src);
1691 }
1692
1693 /* Optimize the access just a bit. */
1694 if (MEM_P (src)
1695 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1696 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1697 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1698 && bytelen == GET_MODE_SIZE (mode))
1699 {
1700 tmps[i] = gen_reg_rtx (mode);
1701 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1702 }
1703 else if (COMPLEX_MODE_P (mode)
1704 && GET_MODE (src) == mode
1705 && bytelen == GET_MODE_SIZE (mode))
1706 /* Let emit_move_complex do the bulk of the work. */
1707 tmps[i] = src;
1708 else if (GET_CODE (src) == CONCAT)
1709 {
1710 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1711 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1712
1713 if ((bytepos == 0 && bytelen == slen0)
1714 || (bytepos != 0 && bytepos + bytelen <= slen))
1715 {
1716 /* The following assumes that the concatenated objects all
1717 have the same size. In this case, a simple calculation
1718 can be used to determine the object and the bit field
1719 to be extracted. */
1720 tmps[i] = XEXP (src, bytepos / slen0);
1721 if (! CONSTANT_P (tmps[i])
1722 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1723 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1724 (bytepos % slen0) * BITS_PER_UNIT,
1725 1, NULL_RTX, mode, mode);
1726 }
1727 else
1728 {
1729 rtx mem;
1730
1731 gcc_assert (!bytepos);
1732 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 emit_move_insn (mem, src);
1734 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1735 0, 1, NULL_RTX, mode, mode);
1736 }
1737 }
1738 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1739 SIMD register, which is currently broken. While we get GCC
1740 to emit proper RTL for these cases, let's dump to memory. */
1741 else if (VECTOR_MODE_P (GET_MODE (dst))
1742 && REG_P (src))
1743 {
1744 int slen = GET_MODE_SIZE (GET_MODE (src));
1745 rtx mem;
1746
1747 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1748 emit_move_insn (mem, src);
1749 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1750 }
1751 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1752 && XVECLEN (dst, 0) > 1)
1753 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1754 else if (CONSTANT_P (src)
1755 || (REG_P (src) && GET_MODE (src) == mode))
1756 tmps[i] = src;
1757 else
1758 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1759 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1760 mode, mode);
1761
1762 if (shift)
1763 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1764 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1765 }
1766 }
1767
1768 /* Emit code to move a block SRC of type TYPE to a block DST,
1769 where DST is non-consecutive registers represented by a PARALLEL.
1770 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1771 if not known. */
1772
1773 void
1774 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1775 {
1776 rtx *tmps;
1777 int i;
1778
1779 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1780 emit_group_load_1 (tmps, dst, src, type, ssize);
1781
1782 /* Copy the extracted pieces into the proper (probable) hard regs. */
1783 for (i = 0; i < XVECLEN (dst, 0); i++)
1784 {
1785 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1786 if (d == NULL)
1787 continue;
1788 emit_move_insn (d, tmps[i]);
1789 }
1790 }
1791
1792 /* Similar, but load SRC into new pseudos in a format that looks like
1793 PARALLEL. This can later be fed to emit_group_move to get things
1794 in the right place. */
1795
1796 rtx
1797 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1798 {
1799 rtvec vec;
1800 int i;
1801
1802 vec = rtvec_alloc (XVECLEN (parallel, 0));
1803 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1804
1805 /* Convert the vector to look just like the original PARALLEL, except
1806 with the computed values. */
1807 for (i = 0; i < XVECLEN (parallel, 0); i++)
1808 {
1809 rtx e = XVECEXP (parallel, 0, i);
1810 rtx d = XEXP (e, 0);
1811
1812 if (d)
1813 {
1814 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1815 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1816 }
1817 RTVEC_ELT (vec, i) = e;
1818 }
1819
1820 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1821 }
1822
1823 /* Emit code to move a block SRC to block DST, where SRC and DST are
1824 non-consecutive groups of registers, each represented by a PARALLEL. */
1825
1826 void
1827 emit_group_move (rtx dst, rtx src)
1828 {
1829 int i;
1830
1831 gcc_assert (GET_CODE (src) == PARALLEL
1832 && GET_CODE (dst) == PARALLEL
1833 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1834
1835 /* Skip first entry if NULL. */
1836 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1837 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1838 XEXP (XVECEXP (src, 0, i), 0));
1839 }
1840
1841 /* Move a group of registers represented by a PARALLEL into pseudos. */
1842
1843 rtx
1844 emit_group_move_into_temps (rtx src)
1845 {
1846 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1847 int i;
1848
1849 for (i = 0; i < XVECLEN (src, 0); i++)
1850 {
1851 rtx e = XVECEXP (src, 0, i);
1852 rtx d = XEXP (e, 0);
1853
1854 if (d)
1855 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1856 RTVEC_ELT (vec, i) = e;
1857 }
1858
1859 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1860 }
1861
1862 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1863 where SRC is non-consecutive registers represented by a PARALLEL.
1864 SSIZE represents the total size of block ORIG_DST, or -1 if not
1865 known. */
1866
1867 void
1868 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1869 {
1870 rtx *tmps, dst;
1871 int start, finish, i;
1872 enum machine_mode m = GET_MODE (orig_dst);
1873
1874 gcc_assert (GET_CODE (src) == PARALLEL);
1875
1876 if (!SCALAR_INT_MODE_P (m)
1877 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1878 {
1879 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1880 if (imode == BLKmode)
1881 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1882 else
1883 dst = gen_reg_rtx (imode);
1884 emit_group_store (dst, src, type, ssize);
1885 if (imode != BLKmode)
1886 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1887 emit_move_insn (orig_dst, dst);
1888 return;
1889 }
1890
1891 /* Check for a NULL entry, used to indicate that the parameter goes
1892 both on the stack and in registers. */
1893 if (XEXP (XVECEXP (src, 0, 0), 0))
1894 start = 0;
1895 else
1896 start = 1;
1897 finish = XVECLEN (src, 0);
1898
1899 tmps = alloca (sizeof (rtx) * finish);
1900
1901 /* Copy the (probable) hard regs into pseudos. */
1902 for (i = start; i < finish; i++)
1903 {
1904 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1905 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1906 {
1907 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1908 emit_move_insn (tmps[i], reg);
1909 }
1910 else
1911 tmps[i] = reg;
1912 }
1913
1914 /* If we won't be storing directly into memory, protect the real destination
1915 from strange tricks we might play. */
1916 dst = orig_dst;
1917 if (GET_CODE (dst) == PARALLEL)
1918 {
1919 rtx temp;
1920
1921 /* We can get a PARALLEL dst if there is a conditional expression in
1922 a return statement. In that case, the dst and src are the same,
1923 so no action is necessary. */
1924 if (rtx_equal_p (dst, src))
1925 return;
1926
1927 /* It is unclear if we can ever reach here, but we may as well handle
1928 it. Allocate a temporary, and split this into a store/load to/from
1929 the temporary. */
1930
1931 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1932 emit_group_store (temp, src, type, ssize);
1933 emit_group_load (dst, temp, type, ssize);
1934 return;
1935 }
1936 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1937 {
1938 enum machine_mode outer = GET_MODE (dst);
1939 enum machine_mode inner;
1940 HOST_WIDE_INT bytepos;
1941 bool done = false;
1942 rtx temp;
1943
1944 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1945 dst = gen_reg_rtx (outer);
1946
1947 /* Make life a bit easier for combine. */
1948 /* If the first element of the vector is the low part
1949 of the destination mode, use a paradoxical subreg to
1950 initialize the destination. */
1951 if (start < finish)
1952 {
1953 inner = GET_MODE (tmps[start]);
1954 bytepos = subreg_lowpart_offset (inner, outer);
1955 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1956 {
1957 temp = simplify_gen_subreg (outer, tmps[start],
1958 inner, 0);
1959 if (temp)
1960 {
1961 emit_move_insn (dst, temp);
1962 done = true;
1963 start++;
1964 }
1965 }
1966 }
1967
1968 /* If the first element wasn't the low part, try the last. */
1969 if (!done
1970 && start < finish - 1)
1971 {
1972 inner = GET_MODE (tmps[finish - 1]);
1973 bytepos = subreg_lowpart_offset (inner, outer);
1974 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1975 {
1976 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1977 inner, 0);
1978 if (temp)
1979 {
1980 emit_move_insn (dst, temp);
1981 done = true;
1982 finish--;
1983 }
1984 }
1985 }
1986
1987 /* Otherwise, simply initialize the result to zero. */
1988 if (!done)
1989 emit_move_insn (dst, CONST0_RTX (outer));
1990 }
1991
1992 /* Process the pieces. */
1993 for (i = start; i < finish; i++)
1994 {
1995 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1996 enum machine_mode mode = GET_MODE (tmps[i]);
1997 unsigned int bytelen = GET_MODE_SIZE (mode);
1998 rtx dest = dst;
1999
2000 /* Handle trailing fragments that run over the size of the struct. */
2001 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2002 {
2003 /* store_bit_field always takes its value from the lsb.
2004 Move the fragment to the lsb if it's not already there. */
2005 if (
2006 #ifdef BLOCK_REG_PADDING
2007 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2008 == (BYTES_BIG_ENDIAN ? upward : downward)
2009 #else
2010 BYTES_BIG_ENDIAN
2011 #endif
2012 )
2013 {
2014 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2015 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2016 build_int_cst (NULL_TREE, shift),
2017 tmps[i], 0);
2018 }
2019 bytelen = ssize - bytepos;
2020 }
2021
2022 if (GET_CODE (dst) == CONCAT)
2023 {
2024 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2025 dest = XEXP (dst, 0);
2026 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2027 {
2028 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2029 dest = XEXP (dst, 1);
2030 }
2031 else
2032 {
2033 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2034 dest = assign_stack_temp (GET_MODE (dest),
2035 GET_MODE_SIZE (GET_MODE (dest)), 0);
2036 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2037 tmps[i]);
2038 dst = dest;
2039 break;
2040 }
2041 }
2042
2043 /* Optimize the access just a bit. */
2044 if (MEM_P (dest)
2045 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2046 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2047 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2048 && bytelen == GET_MODE_SIZE (mode))
2049 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2050 else
2051 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2052 mode, tmps[i]);
2053 }
2054
2055 /* Copy from the pseudo into the (probable) hard reg. */
2056 if (orig_dst != dst)
2057 emit_move_insn (orig_dst, dst);
2058 }
2059
2060 /* Generate code to copy a BLKmode object of TYPE out of a
2061 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2062 is null, a stack temporary is created. TGTBLK is returned.
2063
2064 The purpose of this routine is to handle functions that return
2065 BLKmode structures in registers. Some machines (the PA for example)
2066 want to return all small structures in registers regardless of the
2067 structure's alignment. */
2068
2069 rtx
2070 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2071 {
2072 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2073 rtx src = NULL, dst = NULL;
2074 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2075 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2076
2077 if (tgtblk == 0)
2078 {
2079 tgtblk = assign_temp (build_qualified_type (type,
2080 (TYPE_QUALS (type)
2081 | TYPE_QUAL_CONST)),
2082 0, 1, 1);
2083 preserve_temp_slots (tgtblk);
2084 }
2085
2086 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2087 into a new pseudo which is a full word. */
2088
2089 if (GET_MODE (srcreg) != BLKmode
2090 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2091 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2092
2093 /* If the structure doesn't take up a whole number of words, see whether
2094 SRCREG is padded on the left or on the right. If it's on the left,
2095 set PADDING_CORRECTION to the number of bits to skip.
2096
2097 In most ABIs, the structure will be returned at the least end of
2098 the register, which translates to right padding on little-endian
2099 targets and left padding on big-endian targets. The opposite
2100 holds if the structure is returned at the most significant
2101 end of the register. */
2102 if (bytes % UNITS_PER_WORD != 0
2103 && (targetm.calls.return_in_msb (type)
2104 ? !BYTES_BIG_ENDIAN
2105 : BYTES_BIG_ENDIAN))
2106 padding_correction
2107 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2108
2109 /* Copy the structure BITSIZE bites at a time.
2110
2111 We could probably emit more efficient code for machines which do not use
2112 strict alignment, but it doesn't seem worth the effort at the current
2113 time. */
2114 for (bitpos = 0, xbitpos = padding_correction;
2115 bitpos < bytes * BITS_PER_UNIT;
2116 bitpos += bitsize, xbitpos += bitsize)
2117 {
2118 /* We need a new source operand each time xbitpos is on a
2119 word boundary and when xbitpos == padding_correction
2120 (the first time through). */
2121 if (xbitpos % BITS_PER_WORD == 0
2122 || xbitpos == padding_correction)
2123 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2124 GET_MODE (srcreg));
2125
2126 /* We need a new destination operand each time bitpos is on
2127 a word boundary. */
2128 if (bitpos % BITS_PER_WORD == 0)
2129 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2130
2131 /* Use xbitpos for the source extraction (right justified) and
2132 xbitpos for the destination store (left justified). */
2133 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2134 extract_bit_field (src, bitsize,
2135 xbitpos % BITS_PER_WORD, 1,
2136 NULL_RTX, word_mode, word_mode));
2137 }
2138
2139 return tgtblk;
2140 }
2141
2142 /* Add a USE expression for REG to the (possibly empty) list pointed
2143 to by CALL_FUSAGE. REG must denote a hard register. */
2144
2145 void
2146 use_reg (rtx *call_fusage, rtx reg)
2147 {
2148 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2149
2150 *call_fusage
2151 = gen_rtx_EXPR_LIST (VOIDmode,
2152 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2153 }
2154
2155 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2156 starting at REGNO. All of these registers must be hard registers. */
2157
2158 void
2159 use_regs (rtx *call_fusage, int regno, int nregs)
2160 {
2161 int i;
2162
2163 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2164
2165 for (i = 0; i < nregs; i++)
2166 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2167 }
2168
2169 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2170 PARALLEL REGS. This is for calls that pass values in multiple
2171 non-contiguous locations. The Irix 6 ABI has examples of this. */
2172
2173 void
2174 use_group_regs (rtx *call_fusage, rtx regs)
2175 {
2176 int i;
2177
2178 for (i = 0; i < XVECLEN (regs, 0); i++)
2179 {
2180 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2181
2182 /* A NULL entry means the parameter goes both on the stack and in
2183 registers. This can also be a MEM for targets that pass values
2184 partially on the stack and partially in registers. */
2185 if (reg != 0 && REG_P (reg))
2186 use_reg (call_fusage, reg);
2187 }
2188 }
2189 \f
2190
2191 /* Determine whether the LEN bytes generated by CONSTFUN can be
2192 stored to memory using several move instructions. CONSTFUNDATA is
2193 a pointer which will be passed as argument in every CONSTFUN call.
2194 ALIGN is maximum alignment we can assume. Return nonzero if a
2195 call to store_by_pieces should succeed. */
2196
2197 int
2198 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2199 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2200 void *constfundata, unsigned int align)
2201 {
2202 unsigned HOST_WIDE_INT l;
2203 unsigned int max_size;
2204 HOST_WIDE_INT offset = 0;
2205 enum machine_mode mode, tmode;
2206 enum insn_code icode;
2207 int reverse;
2208 rtx cst;
2209
2210 if (len == 0)
2211 return 1;
2212
2213 if (! STORE_BY_PIECES_P (len, align))
2214 return 0;
2215
2216 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2217 if (align >= GET_MODE_ALIGNMENT (tmode))
2218 align = GET_MODE_ALIGNMENT (tmode);
2219 else
2220 {
2221 enum machine_mode xmode;
2222
2223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2224 tmode != VOIDmode;
2225 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2226 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2227 || SLOW_UNALIGNED_ACCESS (tmode, align))
2228 break;
2229
2230 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2231 }
2232
2233 /* We would first store what we can in the largest integer mode, then go to
2234 successively smaller modes. */
2235
2236 for (reverse = 0;
2237 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2238 reverse++)
2239 {
2240 l = len;
2241 mode = VOIDmode;
2242 max_size = STORE_MAX_PIECES + 1;
2243 while (max_size > 1)
2244 {
2245 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2246 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2247 if (GET_MODE_SIZE (tmode) < max_size)
2248 mode = tmode;
2249
2250 if (mode == VOIDmode)
2251 break;
2252
2253 icode = optab_handler (mov_optab, mode)->insn_code;
2254 if (icode != CODE_FOR_nothing
2255 && align >= GET_MODE_ALIGNMENT (mode))
2256 {
2257 unsigned int size = GET_MODE_SIZE (mode);
2258
2259 while (l >= size)
2260 {
2261 if (reverse)
2262 offset -= size;
2263
2264 cst = (*constfun) (constfundata, offset, mode);
2265 if (!LEGITIMATE_CONSTANT_P (cst))
2266 return 0;
2267
2268 if (!reverse)
2269 offset += size;
2270
2271 l -= size;
2272 }
2273 }
2274
2275 max_size = GET_MODE_SIZE (mode);
2276 }
2277
2278 /* The code above should have handled everything. */
2279 gcc_assert (!l);
2280 }
2281
2282 return 1;
2283 }
2284
2285 /* Generate several move instructions to store LEN bytes generated by
2286 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2287 pointer which will be passed as argument in every CONSTFUN call.
2288 ALIGN is maximum alignment we can assume.
2289 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2290 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2291 stpcpy. */
2292
2293 rtx
2294 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2295 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2296 void *constfundata, unsigned int align, int endp)
2297 {
2298 struct store_by_pieces data;
2299
2300 if (len == 0)
2301 {
2302 gcc_assert (endp != 2);
2303 return to;
2304 }
2305
2306 gcc_assert (STORE_BY_PIECES_P (len, align));
2307 data.constfun = constfun;
2308 data.constfundata = constfundata;
2309 data.len = len;
2310 data.to = to;
2311 store_by_pieces_1 (&data, align);
2312 if (endp)
2313 {
2314 rtx to1;
2315
2316 gcc_assert (!data.reverse);
2317 if (data.autinc_to)
2318 {
2319 if (endp == 2)
2320 {
2321 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2322 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2323 else
2324 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2325 -1));
2326 }
2327 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2328 data.offset);
2329 }
2330 else
2331 {
2332 if (endp == 2)
2333 --data.offset;
2334 to1 = adjust_address (data.to, QImode, data.offset);
2335 }
2336 return to1;
2337 }
2338 else
2339 return data.to;
2340 }
2341
2342 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2343 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2344
2345 static void
2346 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2347 {
2348 struct store_by_pieces data;
2349
2350 if (len == 0)
2351 return;
2352
2353 data.constfun = clear_by_pieces_1;
2354 data.constfundata = NULL;
2355 data.len = len;
2356 data.to = to;
2357 store_by_pieces_1 (&data, align);
2358 }
2359
2360 /* Callback routine for clear_by_pieces.
2361 Return const0_rtx unconditionally. */
2362
2363 static rtx
2364 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2365 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2366 enum machine_mode mode ATTRIBUTE_UNUSED)
2367 {
2368 return const0_rtx;
2369 }
2370
2371 /* Subroutine of clear_by_pieces and store_by_pieces.
2372 Generate several move instructions to store LEN bytes of block TO. (A MEM
2373 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2374
2375 static void
2376 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2377 unsigned int align ATTRIBUTE_UNUSED)
2378 {
2379 rtx to_addr = XEXP (data->to, 0);
2380 unsigned int max_size = STORE_MAX_PIECES + 1;
2381 enum machine_mode mode = VOIDmode, tmode;
2382 enum insn_code icode;
2383
2384 data->offset = 0;
2385 data->to_addr = to_addr;
2386 data->autinc_to
2387 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2388 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2389
2390 data->explicit_inc_to = 0;
2391 data->reverse
2392 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2393 if (data->reverse)
2394 data->offset = data->len;
2395
2396 /* If storing requires more than two move insns,
2397 copy addresses to registers (to make displacements shorter)
2398 and use post-increment if available. */
2399 if (!data->autinc_to
2400 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2401 {
2402 /* Determine the main mode we'll be using. */
2403 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2404 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2405 if (GET_MODE_SIZE (tmode) < max_size)
2406 mode = tmode;
2407
2408 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2409 {
2410 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2411 data->autinc_to = 1;
2412 data->explicit_inc_to = -1;
2413 }
2414
2415 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2416 && ! data->autinc_to)
2417 {
2418 data->to_addr = copy_addr_to_reg (to_addr);
2419 data->autinc_to = 1;
2420 data->explicit_inc_to = 1;
2421 }
2422
2423 if ( !data->autinc_to && CONSTANT_P (to_addr))
2424 data->to_addr = copy_addr_to_reg (to_addr);
2425 }
2426
2427 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2428 if (align >= GET_MODE_ALIGNMENT (tmode))
2429 align = GET_MODE_ALIGNMENT (tmode);
2430 else
2431 {
2432 enum machine_mode xmode;
2433
2434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2435 tmode != VOIDmode;
2436 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2437 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2438 || SLOW_UNALIGNED_ACCESS (tmode, align))
2439 break;
2440
2441 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2442 }
2443
2444 /* First store what we can in the largest integer mode, then go to
2445 successively smaller modes. */
2446
2447 while (max_size > 1)
2448 {
2449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2451 if (GET_MODE_SIZE (tmode) < max_size)
2452 mode = tmode;
2453
2454 if (mode == VOIDmode)
2455 break;
2456
2457 icode = optab_handler (mov_optab, mode)->insn_code;
2458 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2459 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2460
2461 max_size = GET_MODE_SIZE (mode);
2462 }
2463
2464 /* The code above should have handled everything. */
2465 gcc_assert (!data->len);
2466 }
2467
2468 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2469 with move instructions for mode MODE. GENFUN is the gen_... function
2470 to make a move insn for that mode. DATA has all the other info. */
2471
2472 static void
2473 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2474 struct store_by_pieces *data)
2475 {
2476 unsigned int size = GET_MODE_SIZE (mode);
2477 rtx to1, cst;
2478
2479 while (data->len >= size)
2480 {
2481 if (data->reverse)
2482 data->offset -= size;
2483
2484 if (data->autinc_to)
2485 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2486 data->offset);
2487 else
2488 to1 = adjust_address (data->to, mode, data->offset);
2489
2490 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2491 emit_insn (gen_add2_insn (data->to_addr,
2492 GEN_INT (-(HOST_WIDE_INT) size)));
2493
2494 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2495 emit_insn ((*genfun) (to1, cst));
2496
2497 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2498 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2499
2500 if (! data->reverse)
2501 data->offset += size;
2502
2503 data->len -= size;
2504 }
2505 }
2506 \f
2507 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2508 its length in bytes. */
2509
2510 rtx
2511 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2512 unsigned int expected_align, HOST_WIDE_INT expected_size)
2513 {
2514 enum machine_mode mode = GET_MODE (object);
2515 unsigned int align;
2516
2517 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2518
2519 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2520 just move a zero. Otherwise, do this a piece at a time. */
2521 if (mode != BLKmode
2522 && GET_CODE (size) == CONST_INT
2523 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2524 {
2525 rtx zero = CONST0_RTX (mode);
2526 if (zero != NULL)
2527 {
2528 emit_move_insn (object, zero);
2529 return NULL;
2530 }
2531
2532 if (COMPLEX_MODE_P (mode))
2533 {
2534 zero = CONST0_RTX (GET_MODE_INNER (mode));
2535 if (zero != NULL)
2536 {
2537 write_complex_part (object, zero, 0);
2538 write_complex_part (object, zero, 1);
2539 return NULL;
2540 }
2541 }
2542 }
2543
2544 if (size == const0_rtx)
2545 return NULL;
2546
2547 align = MEM_ALIGN (object);
2548
2549 if (GET_CODE (size) == CONST_INT
2550 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2551 clear_by_pieces (object, INTVAL (size), align);
2552 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2553 expected_align, expected_size))
2554 ;
2555 else
2556 return set_storage_via_libcall (object, size, const0_rtx,
2557 method == BLOCK_OP_TAILCALL);
2558
2559 return NULL;
2560 }
2561
2562 rtx
2563 clear_storage (rtx object, rtx size, enum block_op_methods method)
2564 {
2565 return clear_storage_hints (object, size, method, 0, -1);
2566 }
2567
2568
2569 /* A subroutine of clear_storage. Expand a call to memset.
2570 Return the return value of memset, 0 otherwise. */
2571
2572 rtx
2573 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2574 {
2575 tree call_expr, fn, object_tree, size_tree, val_tree;
2576 enum machine_mode size_mode;
2577 rtx retval;
2578
2579 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2580 place those into new pseudos into a VAR_DECL and use them later. */
2581
2582 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2583
2584 size_mode = TYPE_MODE (sizetype);
2585 size = convert_to_mode (size_mode, size, 1);
2586 size = copy_to_mode_reg (size_mode, size);
2587
2588 /* It is incorrect to use the libcall calling conventions to call
2589 memset in this context. This could be a user call to memset and
2590 the user may wish to examine the return value from memset. For
2591 targets where libcalls and normal calls have different conventions
2592 for returning pointers, we could end up generating incorrect code. */
2593
2594 object_tree = make_tree (ptr_type_node, object);
2595 if (GET_CODE (val) != CONST_INT)
2596 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2597 size_tree = make_tree (sizetype, size);
2598 val_tree = make_tree (integer_type_node, val);
2599
2600 fn = clear_storage_libcall_fn (true);
2601 call_expr = build_call_expr (fn, 3,
2602 object_tree, integer_zero_node, size_tree);
2603 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2604
2605 retval = expand_normal (call_expr);
2606
2607 return retval;
2608 }
2609
2610 /* A subroutine of set_storage_via_libcall. Create the tree node
2611 for the function we use for block clears. The first time FOR_CALL
2612 is true, we call assemble_external. */
2613
2614 static GTY(()) tree block_clear_fn;
2615
2616 void
2617 init_block_clear_fn (const char *asmspec)
2618 {
2619 if (!block_clear_fn)
2620 {
2621 tree fn, args;
2622
2623 fn = get_identifier ("memset");
2624 args = build_function_type_list (ptr_type_node, ptr_type_node,
2625 integer_type_node, sizetype,
2626 NULL_TREE);
2627
2628 fn = build_decl (FUNCTION_DECL, fn, args);
2629 DECL_EXTERNAL (fn) = 1;
2630 TREE_PUBLIC (fn) = 1;
2631 DECL_ARTIFICIAL (fn) = 1;
2632 TREE_NOTHROW (fn) = 1;
2633 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2634 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2635
2636 block_clear_fn = fn;
2637 }
2638
2639 if (asmspec)
2640 set_user_assembler_name (block_clear_fn, asmspec);
2641 }
2642
2643 static tree
2644 clear_storage_libcall_fn (int for_call)
2645 {
2646 static bool emitted_extern;
2647
2648 if (!block_clear_fn)
2649 init_block_clear_fn (NULL);
2650
2651 if (for_call && !emitted_extern)
2652 {
2653 emitted_extern = true;
2654 make_decl_rtl (block_clear_fn);
2655 assemble_external (block_clear_fn);
2656 }
2657
2658 return block_clear_fn;
2659 }
2660 \f
2661 /* Expand a setmem pattern; return true if successful. */
2662
2663 bool
2664 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2665 unsigned int expected_align, HOST_WIDE_INT expected_size)
2666 {
2667 /* Try the most limited insn first, because there's no point
2668 including more than one in the machine description unless
2669 the more limited one has some advantage. */
2670
2671 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2672 enum machine_mode mode;
2673
2674 if (expected_align < align)
2675 expected_align = align;
2676
2677 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2678 mode = GET_MODE_WIDER_MODE (mode))
2679 {
2680 enum insn_code code = setmem_optab[(int) mode];
2681 insn_operand_predicate_fn pred;
2682
2683 if (code != CODE_FOR_nothing
2684 /* We don't need MODE to be narrower than
2685 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2686 the mode mask, as it is returned by the macro, it will
2687 definitely be less than the actual mode mask. */
2688 && ((GET_CODE (size) == CONST_INT
2689 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2690 <= (GET_MODE_MASK (mode) >> 1)))
2691 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2692 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2693 || (*pred) (object, BLKmode))
2694 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2695 || (*pred) (opalign, VOIDmode)))
2696 {
2697 rtx opsize, opchar;
2698 enum machine_mode char_mode;
2699 rtx last = get_last_insn ();
2700 rtx pat;
2701
2702 opsize = convert_to_mode (mode, size, 1);
2703 pred = insn_data[(int) code].operand[1].predicate;
2704 if (pred != 0 && ! (*pred) (opsize, mode))
2705 opsize = copy_to_mode_reg (mode, opsize);
2706
2707 opchar = val;
2708 char_mode = insn_data[(int) code].operand[2].mode;
2709 if (char_mode != VOIDmode)
2710 {
2711 opchar = convert_to_mode (char_mode, opchar, 1);
2712 pred = insn_data[(int) code].operand[2].predicate;
2713 if (pred != 0 && ! (*pred) (opchar, char_mode))
2714 opchar = copy_to_mode_reg (char_mode, opchar);
2715 }
2716
2717 if (insn_data[(int) code].n_operands == 4)
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2719 else
2720 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2721 GEN_INT (expected_align),
2722 GEN_INT (expected_size));
2723 if (pat)
2724 {
2725 emit_insn (pat);
2726 return true;
2727 }
2728 else
2729 delete_insns_since (last);
2730 }
2731 }
2732
2733 return false;
2734 }
2735
2736 \f
2737 /* Write to one of the components of the complex value CPLX. Write VAL to
2738 the real part if IMAG_P is false, and the imaginary part if its true. */
2739
2740 static void
2741 write_complex_part (rtx cplx, rtx val, bool imag_p)
2742 {
2743 enum machine_mode cmode;
2744 enum machine_mode imode;
2745 unsigned ibitsize;
2746
2747 if (GET_CODE (cplx) == CONCAT)
2748 {
2749 emit_move_insn (XEXP (cplx, imag_p), val);
2750 return;
2751 }
2752
2753 cmode = GET_MODE (cplx);
2754 imode = GET_MODE_INNER (cmode);
2755 ibitsize = GET_MODE_BITSIZE (imode);
2756
2757 /* For MEMs simplify_gen_subreg may generate an invalid new address
2758 because, e.g., the original address is considered mode-dependent
2759 by the target, which restricts simplify_subreg from invoking
2760 adjust_address_nv. Instead of preparing fallback support for an
2761 invalid address, we call adjust_address_nv directly. */
2762 if (MEM_P (cplx))
2763 {
2764 emit_move_insn (adjust_address_nv (cplx, imode,
2765 imag_p ? GET_MODE_SIZE (imode) : 0),
2766 val);
2767 return;
2768 }
2769
2770 /* If the sub-object is at least word sized, then we know that subregging
2771 will work. This special case is important, since store_bit_field
2772 wants to operate on integer modes, and there's rarely an OImode to
2773 correspond to TCmode. */
2774 if (ibitsize >= BITS_PER_WORD
2775 /* For hard regs we have exact predicates. Assume we can split
2776 the original object if it spans an even number of hard regs.
2777 This special case is important for SCmode on 64-bit platforms
2778 where the natural size of floating-point regs is 32-bit. */
2779 || (REG_P (cplx)
2780 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2781 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2782 {
2783 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2784 imag_p ? GET_MODE_SIZE (imode) : 0);
2785 if (part)
2786 {
2787 emit_move_insn (part, val);
2788 return;
2789 }
2790 else
2791 /* simplify_gen_subreg may fail for sub-word MEMs. */
2792 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2793 }
2794
2795 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2796 }
2797
2798 /* Extract one of the components of the complex value CPLX. Extract the
2799 real part if IMAG_P is false, and the imaginary part if it's true. */
2800
2801 static rtx
2802 read_complex_part (rtx cplx, bool imag_p)
2803 {
2804 enum machine_mode cmode, imode;
2805 unsigned ibitsize;
2806
2807 if (GET_CODE (cplx) == CONCAT)
2808 return XEXP (cplx, imag_p);
2809
2810 cmode = GET_MODE (cplx);
2811 imode = GET_MODE_INNER (cmode);
2812 ibitsize = GET_MODE_BITSIZE (imode);
2813
2814 /* Special case reads from complex constants that got spilled to memory. */
2815 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2816 {
2817 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2818 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2819 {
2820 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2821 if (CONSTANT_CLASS_P (part))
2822 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2823 }
2824 }
2825
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2831 if (MEM_P (cplx))
2832 return adjust_address_nv (cplx, imode,
2833 imag_p ? GET_MODE_SIZE (imode) : 0);
2834
2835 /* If the sub-object is at least word sized, then we know that subregging
2836 will work. This special case is important, since extract_bit_field
2837 wants to operate on integer modes, and there's rarely an OImode to
2838 correspond to TCmode. */
2839 if (ibitsize >= BITS_PER_WORD
2840 /* For hard regs we have exact predicates. Assume we can split
2841 the original object if it spans an even number of hard regs.
2842 This special case is important for SCmode on 64-bit platforms
2843 where the natural size of floating-point regs is 32-bit. */
2844 || (REG_P (cplx)
2845 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2846 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2847 {
2848 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2849 imag_p ? GET_MODE_SIZE (imode) : 0);
2850 if (ret)
2851 return ret;
2852 else
2853 /* simplify_gen_subreg may fail for sub-word MEMs. */
2854 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2855 }
2856
2857 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2858 true, NULL_RTX, imode, imode);
2859 }
2860 \f
2861 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2862 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2863 represented in NEW_MODE. If FORCE is true, this will never happen, as
2864 we'll force-create a SUBREG if needed. */
2865
2866 static rtx
2867 emit_move_change_mode (enum machine_mode new_mode,
2868 enum machine_mode old_mode, rtx x, bool force)
2869 {
2870 rtx ret;
2871
2872 if (push_operand (x, GET_MODE (x)))
2873 {
2874 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2875 MEM_COPY_ATTRIBUTES (ret, x);
2876 }
2877 else if (MEM_P (x))
2878 {
2879 /* We don't have to worry about changing the address since the
2880 size in bytes is supposed to be the same. */
2881 if (reload_in_progress)
2882 {
2883 /* Copy the MEM to change the mode and move any
2884 substitutions from the old MEM to the new one. */
2885 ret = adjust_address_nv (x, new_mode, 0);
2886 copy_replacements (x, ret);
2887 }
2888 else
2889 ret = adjust_address (x, new_mode, 0);
2890 }
2891 else
2892 {
2893 /* Note that we do want simplify_subreg's behavior of validating
2894 that the new mode is ok for a hard register. If we were to use
2895 simplify_gen_subreg, we would create the subreg, but would
2896 probably run into the target not being able to implement it. */
2897 /* Except, of course, when FORCE is true, when this is exactly what
2898 we want. Which is needed for CCmodes on some targets. */
2899 if (force)
2900 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2901 else
2902 ret = simplify_subreg (new_mode, x, old_mode, 0);
2903 }
2904
2905 return ret;
2906 }
2907
2908 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2909 an integer mode of the same size as MODE. Returns the instruction
2910 emitted, or NULL if such a move could not be generated. */
2911
2912 static rtx
2913 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2914 {
2915 enum machine_mode imode;
2916 enum insn_code code;
2917
2918 /* There must exist a mode of the exact size we require. */
2919 imode = int_mode_for_mode (mode);
2920 if (imode == BLKmode)
2921 return NULL_RTX;
2922
2923 /* The target must support moves in this mode. */
2924 code = optab_handler (mov_optab, imode)->insn_code;
2925 if (code == CODE_FOR_nothing)
2926 return NULL_RTX;
2927
2928 x = emit_move_change_mode (imode, mode, x, force);
2929 if (x == NULL_RTX)
2930 return NULL_RTX;
2931 y = emit_move_change_mode (imode, mode, y, force);
2932 if (y == NULL_RTX)
2933 return NULL_RTX;
2934 return emit_insn (GEN_FCN (code) (x, y));
2935 }
2936
2937 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2938 Return an equivalent MEM that does not use an auto-increment. */
2939
2940 static rtx
2941 emit_move_resolve_push (enum machine_mode mode, rtx x)
2942 {
2943 enum rtx_code code = GET_CODE (XEXP (x, 0));
2944 HOST_WIDE_INT adjust;
2945 rtx temp;
2946
2947 adjust = GET_MODE_SIZE (mode);
2948 #ifdef PUSH_ROUNDING
2949 adjust = PUSH_ROUNDING (adjust);
2950 #endif
2951 if (code == PRE_DEC || code == POST_DEC)
2952 adjust = -adjust;
2953 else if (code == PRE_MODIFY || code == POST_MODIFY)
2954 {
2955 rtx expr = XEXP (XEXP (x, 0), 1);
2956 HOST_WIDE_INT val;
2957
2958 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2959 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2960 val = INTVAL (XEXP (expr, 1));
2961 if (GET_CODE (expr) == MINUS)
2962 val = -val;
2963 gcc_assert (adjust == val || adjust == -val);
2964 adjust = val;
2965 }
2966
2967 /* Do not use anti_adjust_stack, since we don't want to update
2968 stack_pointer_delta. */
2969 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2970 GEN_INT (adjust), stack_pointer_rtx,
2971 0, OPTAB_LIB_WIDEN);
2972 if (temp != stack_pointer_rtx)
2973 emit_move_insn (stack_pointer_rtx, temp);
2974
2975 switch (code)
2976 {
2977 case PRE_INC:
2978 case PRE_DEC:
2979 case PRE_MODIFY:
2980 temp = stack_pointer_rtx;
2981 break;
2982 case POST_INC:
2983 case POST_DEC:
2984 case POST_MODIFY:
2985 temp = plus_constant (stack_pointer_rtx, -adjust);
2986 break;
2987 default:
2988 gcc_unreachable ();
2989 }
2990
2991 return replace_equiv_address (x, temp);
2992 }
2993
2994 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2995 X is known to satisfy push_operand, and MODE is known to be complex.
2996 Returns the last instruction emitted. */
2997
2998 rtx
2999 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3000 {
3001 enum machine_mode submode = GET_MODE_INNER (mode);
3002 bool imag_first;
3003
3004 #ifdef PUSH_ROUNDING
3005 unsigned int submodesize = GET_MODE_SIZE (submode);
3006
3007 /* In case we output to the stack, but the size is smaller than the
3008 machine can push exactly, we need to use move instructions. */
3009 if (PUSH_ROUNDING (submodesize) != submodesize)
3010 {
3011 x = emit_move_resolve_push (mode, x);
3012 return emit_move_insn (x, y);
3013 }
3014 #endif
3015
3016 /* Note that the real part always precedes the imag part in memory
3017 regardless of machine's endianness. */
3018 switch (GET_CODE (XEXP (x, 0)))
3019 {
3020 case PRE_DEC:
3021 case POST_DEC:
3022 imag_first = true;
3023 break;
3024 case PRE_INC:
3025 case POST_INC:
3026 imag_first = false;
3027 break;
3028 default:
3029 gcc_unreachable ();
3030 }
3031
3032 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3033 read_complex_part (y, imag_first));
3034 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3035 read_complex_part (y, !imag_first));
3036 }
3037
3038 /* A subroutine of emit_move_complex. Perform the move from Y to X
3039 via two moves of the parts. Returns the last instruction emitted. */
3040
3041 rtx
3042 emit_move_complex_parts (rtx x, rtx y)
3043 {
3044 /* Show the output dies here. This is necessary for SUBREGs
3045 of pseudos since we cannot track their lifetimes correctly;
3046 hard regs shouldn't appear here except as return values. */
3047 if (!reload_completed && !reload_in_progress
3048 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3049 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3050
3051 write_complex_part (x, read_complex_part (y, false), false);
3052 write_complex_part (x, read_complex_part (y, true), true);
3053
3054 return get_last_insn ();
3055 }
3056
3057 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3058 MODE is known to be complex. Returns the last instruction emitted. */
3059
3060 static rtx
3061 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3062 {
3063 bool try_int;
3064
3065 /* Need to take special care for pushes, to maintain proper ordering
3066 of the data, and possibly extra padding. */
3067 if (push_operand (x, mode))
3068 return emit_move_complex_push (mode, x, y);
3069
3070 /* See if we can coerce the target into moving both values at once. */
3071
3072 /* Move floating point as parts. */
3073 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3074 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3075 try_int = false;
3076 /* Not possible if the values are inherently not adjacent. */
3077 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3078 try_int = false;
3079 /* Is possible if both are registers (or subregs of registers). */
3080 else if (register_operand (x, mode) && register_operand (y, mode))
3081 try_int = true;
3082 /* If one of the operands is a memory, and alignment constraints
3083 are friendly enough, we may be able to do combined memory operations.
3084 We do not attempt this if Y is a constant because that combination is
3085 usually better with the by-parts thing below. */
3086 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3087 && (!STRICT_ALIGNMENT
3088 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3089 try_int = true;
3090 else
3091 try_int = false;
3092
3093 if (try_int)
3094 {
3095 rtx ret;
3096
3097 /* For memory to memory moves, optimal behavior can be had with the
3098 existing block move logic. */
3099 if (MEM_P (x) && MEM_P (y))
3100 {
3101 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3102 BLOCK_OP_NO_LIBCALL);
3103 return get_last_insn ();
3104 }
3105
3106 ret = emit_move_via_integer (mode, x, y, true);
3107 if (ret)
3108 return ret;
3109 }
3110
3111 return emit_move_complex_parts (x, y);
3112 }
3113
3114 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3115 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3116
3117 static rtx
3118 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3119 {
3120 rtx ret;
3121
3122 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3123 if (mode != CCmode)
3124 {
3125 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3126 if (code != CODE_FOR_nothing)
3127 {
3128 x = emit_move_change_mode (CCmode, mode, x, true);
3129 y = emit_move_change_mode (CCmode, mode, y, true);
3130 return emit_insn (GEN_FCN (code) (x, y));
3131 }
3132 }
3133
3134 /* Otherwise, find the MODE_INT mode of the same width. */
3135 ret = emit_move_via_integer (mode, x, y, false);
3136 gcc_assert (ret != NULL);
3137 return ret;
3138 }
3139
3140 /* Return true if word I of OP lies entirely in the
3141 undefined bits of a paradoxical subreg. */
3142
3143 static bool
3144 undefined_operand_subword_p (rtx op, int i)
3145 {
3146 enum machine_mode innermode, innermostmode;
3147 int offset;
3148 if (GET_CODE (op) != SUBREG)
3149 return false;
3150 innermode = GET_MODE (op);
3151 innermostmode = GET_MODE (SUBREG_REG (op));
3152 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3153 /* The SUBREG_BYTE represents offset, as if the value were stored in
3154 memory, except for a paradoxical subreg where we define
3155 SUBREG_BYTE to be 0; undo this exception as in
3156 simplify_subreg. */
3157 if (SUBREG_BYTE (op) == 0
3158 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3159 {
3160 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3161 if (WORDS_BIG_ENDIAN)
3162 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3163 if (BYTES_BIG_ENDIAN)
3164 offset += difference % UNITS_PER_WORD;
3165 }
3166 if (offset >= GET_MODE_SIZE (innermostmode)
3167 || offset <= -GET_MODE_SIZE (word_mode))
3168 return true;
3169 return false;
3170 }
3171
3172 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3173 MODE is any multi-word or full-word mode that lacks a move_insn
3174 pattern. Note that you will get better code if you define such
3175 patterns, even if they must turn into multiple assembler instructions. */
3176
3177 static rtx
3178 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3179 {
3180 rtx last_insn = 0;
3181 rtx seq, inner;
3182 bool need_clobber;
3183 int i;
3184
3185 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3186
3187 /* If X is a push on the stack, do the push now and replace
3188 X with a reference to the stack pointer. */
3189 if (push_operand (x, mode))
3190 x = emit_move_resolve_push (mode, x);
3191
3192 /* If we are in reload, see if either operand is a MEM whose address
3193 is scheduled for replacement. */
3194 if (reload_in_progress && MEM_P (x)
3195 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3196 x = replace_equiv_address_nv (x, inner);
3197 if (reload_in_progress && MEM_P (y)
3198 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3199 y = replace_equiv_address_nv (y, inner);
3200
3201 start_sequence ();
3202
3203 need_clobber = false;
3204 for (i = 0;
3205 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3206 i++)
3207 {
3208 rtx xpart = operand_subword (x, i, 1, mode);
3209 rtx ypart;
3210
3211 /* Do not generate code for a move if it would come entirely
3212 from the undefined bits of a paradoxical subreg. */
3213 if (undefined_operand_subword_p (y, i))
3214 continue;
3215
3216 ypart = operand_subword (y, i, 1, mode);
3217
3218 /* If we can't get a part of Y, put Y into memory if it is a
3219 constant. Otherwise, force it into a register. Then we must
3220 be able to get a part of Y. */
3221 if (ypart == 0 && CONSTANT_P (y))
3222 {
3223 y = use_anchored_address (force_const_mem (mode, y));
3224 ypart = operand_subword (y, i, 1, mode);
3225 }
3226 else if (ypart == 0)
3227 ypart = operand_subword_force (y, i, mode);
3228
3229 gcc_assert (xpart && ypart);
3230
3231 need_clobber |= (GET_CODE (xpart) == SUBREG);
3232
3233 last_insn = emit_move_insn (xpart, ypart);
3234 }
3235
3236 seq = get_insns ();
3237 end_sequence ();
3238
3239 /* Show the output dies here. This is necessary for SUBREGs
3240 of pseudos since we cannot track their lifetimes correctly;
3241 hard regs shouldn't appear here except as return values.
3242 We never want to emit such a clobber after reload. */
3243 if (x != y
3244 && ! (reload_in_progress || reload_completed)
3245 && need_clobber != 0)
3246 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3247
3248 emit_insn (seq);
3249
3250 return last_insn;
3251 }
3252
3253 /* Low level part of emit_move_insn.
3254 Called just like emit_move_insn, but assumes X and Y
3255 are basically valid. */
3256
3257 rtx
3258 emit_move_insn_1 (rtx x, rtx y)
3259 {
3260 enum machine_mode mode = GET_MODE (x);
3261 enum insn_code code;
3262
3263 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3264
3265 code = optab_handler (mov_optab, mode)->insn_code;
3266 if (code != CODE_FOR_nothing)
3267 return emit_insn (GEN_FCN (code) (x, y));
3268
3269 /* Expand complex moves by moving real part and imag part. */
3270 if (COMPLEX_MODE_P (mode))
3271 return emit_move_complex (mode, x, y);
3272
3273 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3274 {
3275 rtx result = emit_move_via_integer (mode, x, y, true);
3276
3277 /* If we can't find an integer mode, use multi words. */
3278 if (result)
3279 return result;
3280 else
3281 return emit_move_multi_word (mode, x, y);
3282 }
3283
3284 if (GET_MODE_CLASS (mode) == MODE_CC)
3285 return emit_move_ccmode (mode, x, y);
3286
3287 /* Try using a move pattern for the corresponding integer mode. This is
3288 only safe when simplify_subreg can convert MODE constants into integer
3289 constants. At present, it can only do this reliably if the value
3290 fits within a HOST_WIDE_INT. */
3291 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3292 {
3293 rtx ret = emit_move_via_integer (mode, x, y, false);
3294 if (ret)
3295 return ret;
3296 }
3297
3298 return emit_move_multi_word (mode, x, y);
3299 }
3300
3301 /* Generate code to copy Y into X.
3302 Both Y and X must have the same mode, except that
3303 Y can be a constant with VOIDmode.
3304 This mode cannot be BLKmode; use emit_block_move for that.
3305
3306 Return the last instruction emitted. */
3307
3308 rtx
3309 emit_move_insn (rtx x, rtx y)
3310 {
3311 enum machine_mode mode = GET_MODE (x);
3312 rtx y_cst = NULL_RTX;
3313 rtx last_insn, set;
3314
3315 gcc_assert (mode != BLKmode
3316 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3317
3318 if (CONSTANT_P (y))
3319 {
3320 if (optimize
3321 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3322 && (last_insn = compress_float_constant (x, y)))
3323 return last_insn;
3324
3325 y_cst = y;
3326
3327 if (!LEGITIMATE_CONSTANT_P (y))
3328 {
3329 y = force_const_mem (mode, y);
3330
3331 /* If the target's cannot_force_const_mem prevented the spill,
3332 assume that the target's move expanders will also take care
3333 of the non-legitimate constant. */
3334 if (!y)
3335 y = y_cst;
3336 else
3337 y = use_anchored_address (y);
3338 }
3339 }
3340
3341 /* If X or Y are memory references, verify that their addresses are valid
3342 for the machine. */
3343 if (MEM_P (x)
3344 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3345 && ! push_operand (x, GET_MODE (x)))
3346 || (flag_force_addr
3347 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3348 x = validize_mem (x);
3349
3350 if (MEM_P (y)
3351 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3352 || (flag_force_addr
3353 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3354 y = validize_mem (y);
3355
3356 gcc_assert (mode != BLKmode);
3357
3358 last_insn = emit_move_insn_1 (x, y);
3359
3360 if (y_cst && REG_P (x)
3361 && (set = single_set (last_insn)) != NULL_RTX
3362 && SET_DEST (set) == x
3363 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3364 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3365
3366 return last_insn;
3367 }
3368
3369 /* If Y is representable exactly in a narrower mode, and the target can
3370 perform the extension directly from constant or memory, then emit the
3371 move as an extension. */
3372
3373 static rtx
3374 compress_float_constant (rtx x, rtx y)
3375 {
3376 enum machine_mode dstmode = GET_MODE (x);
3377 enum machine_mode orig_srcmode = GET_MODE (y);
3378 enum machine_mode srcmode;
3379 REAL_VALUE_TYPE r;
3380 int oldcost, newcost;
3381
3382 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3383
3384 if (LEGITIMATE_CONSTANT_P (y))
3385 oldcost = rtx_cost (y, SET);
3386 else
3387 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3388
3389 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3390 srcmode != orig_srcmode;
3391 srcmode = GET_MODE_WIDER_MODE (srcmode))
3392 {
3393 enum insn_code ic;
3394 rtx trunc_y, last_insn;
3395
3396 /* Skip if the target can't extend this way. */
3397 ic = can_extend_p (dstmode, srcmode, 0);
3398 if (ic == CODE_FOR_nothing)
3399 continue;
3400
3401 /* Skip if the narrowed value isn't exact. */
3402 if (! exact_real_truncate (srcmode, &r))
3403 continue;
3404
3405 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3406
3407 if (LEGITIMATE_CONSTANT_P (trunc_y))
3408 {
3409 /* Skip if the target needs extra instructions to perform
3410 the extension. */
3411 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3412 continue;
3413 /* This is valid, but may not be cheaper than the original. */
3414 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3415 if (oldcost < newcost)
3416 continue;
3417 }
3418 else if (float_extend_from_mem[dstmode][srcmode])
3419 {
3420 trunc_y = force_const_mem (srcmode, trunc_y);
3421 /* This is valid, but may not be cheaper than the original. */
3422 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3423 if (oldcost < newcost)
3424 continue;
3425 trunc_y = validize_mem (trunc_y);
3426 }
3427 else
3428 continue;
3429
3430 /* For CSE's benefit, force the compressed constant pool entry
3431 into a new pseudo. This constant may be used in different modes,
3432 and if not, combine will put things back together for us. */
3433 trunc_y = force_reg (srcmode, trunc_y);
3434 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3435 last_insn = get_last_insn ();
3436
3437 if (REG_P (x))
3438 set_unique_reg_note (last_insn, REG_EQUAL, y);
3439
3440 return last_insn;
3441 }
3442
3443 return NULL_RTX;
3444 }
3445 \f
3446 /* Pushing data onto the stack. */
3447
3448 /* Push a block of length SIZE (perhaps variable)
3449 and return an rtx to address the beginning of the block.
3450 The value may be virtual_outgoing_args_rtx.
3451
3452 EXTRA is the number of bytes of padding to push in addition to SIZE.
3453 BELOW nonzero means this padding comes at low addresses;
3454 otherwise, the padding comes at high addresses. */
3455
3456 rtx
3457 push_block (rtx size, int extra, int below)
3458 {
3459 rtx temp;
3460
3461 size = convert_modes (Pmode, ptr_mode, size, 1);
3462 if (CONSTANT_P (size))
3463 anti_adjust_stack (plus_constant (size, extra));
3464 else if (REG_P (size) && extra == 0)
3465 anti_adjust_stack (size);
3466 else
3467 {
3468 temp = copy_to_mode_reg (Pmode, size);
3469 if (extra != 0)
3470 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3471 temp, 0, OPTAB_LIB_WIDEN);
3472 anti_adjust_stack (temp);
3473 }
3474
3475 #ifndef STACK_GROWS_DOWNWARD
3476 if (0)
3477 #else
3478 if (1)
3479 #endif
3480 {
3481 temp = virtual_outgoing_args_rtx;
3482 if (extra != 0 && below)
3483 temp = plus_constant (temp, extra);
3484 }
3485 else
3486 {
3487 if (GET_CODE (size) == CONST_INT)
3488 temp = plus_constant (virtual_outgoing_args_rtx,
3489 -INTVAL (size) - (below ? 0 : extra));
3490 else if (extra != 0 && !below)
3491 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3492 negate_rtx (Pmode, plus_constant (size, extra)));
3493 else
3494 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3495 negate_rtx (Pmode, size));
3496 }
3497
3498 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3499 }
3500
3501 #ifdef PUSH_ROUNDING
3502
3503 /* Emit single push insn. */
3504
3505 static void
3506 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3507 {
3508 rtx dest_addr;
3509 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3510 rtx dest;
3511 enum insn_code icode;
3512 insn_operand_predicate_fn pred;
3513
3514 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3515 /* If there is push pattern, use it. Otherwise try old way of throwing
3516 MEM representing push operation to move expander. */
3517 icode = optab_handler (push_optab, mode)->insn_code;
3518 if (icode != CODE_FOR_nothing)
3519 {
3520 if (((pred = insn_data[(int) icode].operand[0].predicate)
3521 && !((*pred) (x, mode))))
3522 x = force_reg (mode, x);
3523 emit_insn (GEN_FCN (icode) (x));
3524 return;
3525 }
3526 if (GET_MODE_SIZE (mode) == rounded_size)
3527 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3528 /* If we are to pad downward, adjust the stack pointer first and
3529 then store X into the stack location using an offset. This is
3530 because emit_move_insn does not know how to pad; it does not have
3531 access to type. */
3532 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3533 {
3534 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3535 HOST_WIDE_INT offset;
3536
3537 emit_move_insn (stack_pointer_rtx,
3538 expand_binop (Pmode,
3539 #ifdef STACK_GROWS_DOWNWARD
3540 sub_optab,
3541 #else
3542 add_optab,
3543 #endif
3544 stack_pointer_rtx,
3545 GEN_INT (rounded_size),
3546 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3547
3548 offset = (HOST_WIDE_INT) padding_size;
3549 #ifdef STACK_GROWS_DOWNWARD
3550 if (STACK_PUSH_CODE == POST_DEC)
3551 /* We have already decremented the stack pointer, so get the
3552 previous value. */
3553 offset += (HOST_WIDE_INT) rounded_size;
3554 #else
3555 if (STACK_PUSH_CODE == POST_INC)
3556 /* We have already incremented the stack pointer, so get the
3557 previous value. */
3558 offset -= (HOST_WIDE_INT) rounded_size;
3559 #endif
3560 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3561 }
3562 else
3563 {
3564 #ifdef STACK_GROWS_DOWNWARD
3565 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3566 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3567 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3568 #else
3569 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3570 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3571 GEN_INT (rounded_size));
3572 #endif
3573 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3574 }
3575
3576 dest = gen_rtx_MEM (mode, dest_addr);
3577
3578 if (type != 0)
3579 {
3580 set_mem_attributes (dest, type, 1);
3581
3582 if (flag_optimize_sibling_calls)
3583 /* Function incoming arguments may overlap with sibling call
3584 outgoing arguments and we cannot allow reordering of reads
3585 from function arguments with stores to outgoing arguments
3586 of sibling calls. */
3587 set_mem_alias_set (dest, 0);
3588 }
3589 emit_move_insn (dest, x);
3590 }
3591 #endif
3592
3593 /* Generate code to push X onto the stack, assuming it has mode MODE and
3594 type TYPE.
3595 MODE is redundant except when X is a CONST_INT (since they don't
3596 carry mode info).
3597 SIZE is an rtx for the size of data to be copied (in bytes),
3598 needed only if X is BLKmode.
3599
3600 ALIGN (in bits) is maximum alignment we can assume.
3601
3602 If PARTIAL and REG are both nonzero, then copy that many of the first
3603 bytes of X into registers starting with REG, and push the rest of X.
3604 The amount of space pushed is decreased by PARTIAL bytes.
3605 REG must be a hard register in this case.
3606 If REG is zero but PARTIAL is not, take any all others actions for an
3607 argument partially in registers, but do not actually load any
3608 registers.
3609
3610 EXTRA is the amount in bytes of extra space to leave next to this arg.
3611 This is ignored if an argument block has already been allocated.
3612
3613 On a machine that lacks real push insns, ARGS_ADDR is the address of
3614 the bottom of the argument block for this call. We use indexing off there
3615 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3616 argument block has not been preallocated.
3617
3618 ARGS_SO_FAR is the size of args previously pushed for this call.
3619
3620 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3621 for arguments passed in registers. If nonzero, it will be the number
3622 of bytes required. */
3623
3624 void
3625 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3626 unsigned int align, int partial, rtx reg, int extra,
3627 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3628 rtx alignment_pad)
3629 {
3630 rtx xinner;
3631 enum direction stack_direction
3632 #ifdef STACK_GROWS_DOWNWARD
3633 = downward;
3634 #else
3635 = upward;
3636 #endif
3637
3638 /* Decide where to pad the argument: `downward' for below,
3639 `upward' for above, or `none' for don't pad it.
3640 Default is below for small data on big-endian machines; else above. */
3641 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3642
3643 /* Invert direction if stack is post-decrement.
3644 FIXME: why? */
3645 if (STACK_PUSH_CODE == POST_DEC)
3646 if (where_pad != none)
3647 where_pad = (where_pad == downward ? upward : downward);
3648
3649 xinner = x;
3650
3651 if (mode == BLKmode
3652 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3653 {
3654 /* Copy a block into the stack, entirely or partially. */
3655
3656 rtx temp;
3657 int used;
3658 int offset;
3659 int skip;
3660
3661 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3662 used = partial - offset;
3663
3664 if (mode != BLKmode)
3665 {
3666 /* A value is to be stored in an insufficiently aligned
3667 stack slot; copy via a suitably aligned slot if
3668 necessary. */
3669 size = GEN_INT (GET_MODE_SIZE (mode));
3670 if (!MEM_P (xinner))
3671 {
3672 temp = assign_temp (type, 0, 1, 1);
3673 emit_move_insn (temp, xinner);
3674 xinner = temp;
3675 }
3676 }
3677
3678 gcc_assert (size);
3679
3680 /* USED is now the # of bytes we need not copy to the stack
3681 because registers will take care of them. */
3682
3683 if (partial != 0)
3684 xinner = adjust_address (xinner, BLKmode, used);
3685
3686 /* If the partial register-part of the arg counts in its stack size,
3687 skip the part of stack space corresponding to the registers.
3688 Otherwise, start copying to the beginning of the stack space,
3689 by setting SKIP to 0. */
3690 skip = (reg_parm_stack_space == 0) ? 0 : used;
3691
3692 #ifdef PUSH_ROUNDING
3693 /* Do it with several push insns if that doesn't take lots of insns
3694 and if there is no difficulty with push insns that skip bytes
3695 on the stack for alignment purposes. */
3696 if (args_addr == 0
3697 && PUSH_ARGS
3698 && GET_CODE (size) == CONST_INT
3699 && skip == 0
3700 && MEM_ALIGN (xinner) >= align
3701 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3702 /* Here we avoid the case of a structure whose weak alignment
3703 forces many pushes of a small amount of data,
3704 and such small pushes do rounding that causes trouble. */
3705 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3706 || align >= BIGGEST_ALIGNMENT
3707 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3708 == (align / BITS_PER_UNIT)))
3709 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3710 {
3711 /* Push padding now if padding above and stack grows down,
3712 or if padding below and stack grows up.
3713 But if space already allocated, this has already been done. */
3714 if (extra && args_addr == 0
3715 && where_pad != none && where_pad != stack_direction)
3716 anti_adjust_stack (GEN_INT (extra));
3717
3718 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3719 }
3720 else
3721 #endif /* PUSH_ROUNDING */
3722 {
3723 rtx target;
3724
3725 /* Otherwise make space on the stack and copy the data
3726 to the address of that space. */
3727
3728 /* Deduct words put into registers from the size we must copy. */
3729 if (partial != 0)
3730 {
3731 if (GET_CODE (size) == CONST_INT)
3732 size = GEN_INT (INTVAL (size) - used);
3733 else
3734 size = expand_binop (GET_MODE (size), sub_optab, size,
3735 GEN_INT (used), NULL_RTX, 0,
3736 OPTAB_LIB_WIDEN);
3737 }
3738
3739 /* Get the address of the stack space.
3740 In this case, we do not deal with EXTRA separately.
3741 A single stack adjust will do. */
3742 if (! args_addr)
3743 {
3744 temp = push_block (size, extra, where_pad == downward);
3745 extra = 0;
3746 }
3747 else if (GET_CODE (args_so_far) == CONST_INT)
3748 temp = memory_address (BLKmode,
3749 plus_constant (args_addr,
3750 skip + INTVAL (args_so_far)));
3751 else
3752 temp = memory_address (BLKmode,
3753 plus_constant (gen_rtx_PLUS (Pmode,
3754 args_addr,
3755 args_so_far),
3756 skip));
3757
3758 if (!ACCUMULATE_OUTGOING_ARGS)
3759 {
3760 /* If the source is referenced relative to the stack pointer,
3761 copy it to another register to stabilize it. We do not need
3762 to do this if we know that we won't be changing sp. */
3763
3764 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3765 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3766 temp = copy_to_reg (temp);
3767 }
3768
3769 target = gen_rtx_MEM (BLKmode, temp);
3770
3771 /* We do *not* set_mem_attributes here, because incoming arguments
3772 may overlap with sibling call outgoing arguments and we cannot
3773 allow reordering of reads from function arguments with stores
3774 to outgoing arguments of sibling calls. We do, however, want
3775 to record the alignment of the stack slot. */
3776 /* ALIGN may well be better aligned than TYPE, e.g. due to
3777 PARM_BOUNDARY. Assume the caller isn't lying. */
3778 set_mem_align (target, align);
3779
3780 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3781 }
3782 }
3783 else if (partial > 0)
3784 {
3785 /* Scalar partly in registers. */
3786
3787 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3788 int i;
3789 int not_stack;
3790 /* # bytes of start of argument
3791 that we must make space for but need not store. */
3792 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3793 int args_offset = INTVAL (args_so_far);
3794 int skip;
3795
3796 /* Push padding now if padding above and stack grows down,
3797 or if padding below and stack grows up.
3798 But if space already allocated, this has already been done. */
3799 if (extra && args_addr == 0
3800 && where_pad != none && where_pad != stack_direction)
3801 anti_adjust_stack (GEN_INT (extra));
3802
3803 /* If we make space by pushing it, we might as well push
3804 the real data. Otherwise, we can leave OFFSET nonzero
3805 and leave the space uninitialized. */
3806 if (args_addr == 0)
3807 offset = 0;
3808
3809 /* Now NOT_STACK gets the number of words that we don't need to
3810 allocate on the stack. Convert OFFSET to words too. */
3811 not_stack = (partial - offset) / UNITS_PER_WORD;
3812 offset /= UNITS_PER_WORD;
3813
3814 /* If the partial register-part of the arg counts in its stack size,
3815 skip the part of stack space corresponding to the registers.
3816 Otherwise, start copying to the beginning of the stack space,
3817 by setting SKIP to 0. */
3818 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3819
3820 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3821 x = validize_mem (force_const_mem (mode, x));
3822
3823 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3824 SUBREGs of such registers are not allowed. */
3825 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3826 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3827 x = copy_to_reg (x);
3828
3829 /* Loop over all the words allocated on the stack for this arg. */
3830 /* We can do it by words, because any scalar bigger than a word
3831 has a size a multiple of a word. */
3832 #ifndef PUSH_ARGS_REVERSED
3833 for (i = not_stack; i < size; i++)
3834 #else
3835 for (i = size - 1; i >= not_stack; i--)
3836 #endif
3837 if (i >= not_stack + offset)
3838 emit_push_insn (operand_subword_force (x, i, mode),
3839 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3840 0, args_addr,
3841 GEN_INT (args_offset + ((i - not_stack + skip)
3842 * UNITS_PER_WORD)),
3843 reg_parm_stack_space, alignment_pad);
3844 }
3845 else
3846 {
3847 rtx addr;
3848 rtx dest;
3849
3850 /* Push padding now if padding above and stack grows down,
3851 or if padding below and stack grows up.
3852 But if space already allocated, this has already been done. */
3853 if (extra && args_addr == 0
3854 && where_pad != none && where_pad != stack_direction)
3855 anti_adjust_stack (GEN_INT (extra));
3856
3857 #ifdef PUSH_ROUNDING
3858 if (args_addr == 0 && PUSH_ARGS)
3859 emit_single_push_insn (mode, x, type);
3860 else
3861 #endif
3862 {
3863 if (GET_CODE (args_so_far) == CONST_INT)
3864 addr
3865 = memory_address (mode,
3866 plus_constant (args_addr,
3867 INTVAL (args_so_far)));
3868 else
3869 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3870 args_so_far));
3871 dest = gen_rtx_MEM (mode, addr);
3872
3873 /* We do *not* set_mem_attributes here, because incoming arguments
3874 may overlap with sibling call outgoing arguments and we cannot
3875 allow reordering of reads from function arguments with stores
3876 to outgoing arguments of sibling calls. We do, however, want
3877 to record the alignment of the stack slot. */
3878 /* ALIGN may well be better aligned than TYPE, e.g. due to
3879 PARM_BOUNDARY. Assume the caller isn't lying. */
3880 set_mem_align (dest, align);
3881
3882 emit_move_insn (dest, x);
3883 }
3884 }
3885
3886 /* If part should go in registers, copy that part
3887 into the appropriate registers. Do this now, at the end,
3888 since mem-to-mem copies above may do function calls. */
3889 if (partial > 0 && reg != 0)
3890 {
3891 /* Handle calls that pass values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (reg) == PARALLEL)
3894 emit_group_load (reg, x, type, -1);
3895 else
3896 {
3897 gcc_assert (partial % UNITS_PER_WORD == 0);
3898 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3899 }
3900 }
3901
3902 if (extra && args_addr == 0 && where_pad == stack_direction)
3903 anti_adjust_stack (GEN_INT (extra));
3904
3905 if (alignment_pad && args_addr == 0)
3906 anti_adjust_stack (alignment_pad);
3907 }
3908 \f
3909 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3910 operations. */
3911
3912 static rtx
3913 get_subtarget (rtx x)
3914 {
3915 return (optimize
3916 || x == 0
3917 /* Only registers can be subtargets. */
3918 || !REG_P (x)
3919 /* Don't use hard regs to avoid extending their life. */
3920 || REGNO (x) < FIRST_PSEUDO_REGISTER
3921 ? 0 : x);
3922 }
3923
3924 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3925 FIELD is a bitfield. Returns true if the optimization was successful,
3926 and there's nothing else to do. */
3927
3928 static bool
3929 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3930 unsigned HOST_WIDE_INT bitpos,
3931 enum machine_mode mode1, rtx str_rtx,
3932 tree to, tree src)
3933 {
3934 enum machine_mode str_mode = GET_MODE (str_rtx);
3935 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3936 tree op0, op1;
3937 rtx value, result;
3938 optab binop;
3939
3940 if (mode1 != VOIDmode
3941 || bitsize >= BITS_PER_WORD
3942 || str_bitsize > BITS_PER_WORD
3943 || TREE_SIDE_EFFECTS (to)
3944 || TREE_THIS_VOLATILE (to))
3945 return false;
3946
3947 STRIP_NOPS (src);
3948 if (!BINARY_CLASS_P (src)
3949 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3950 return false;
3951
3952 op0 = TREE_OPERAND (src, 0);
3953 op1 = TREE_OPERAND (src, 1);
3954 STRIP_NOPS (op0);
3955
3956 if (!operand_equal_p (to, op0, 0))
3957 return false;
3958
3959 if (MEM_P (str_rtx))
3960 {
3961 unsigned HOST_WIDE_INT offset1;
3962
3963 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3964 str_mode = word_mode;
3965 str_mode = get_best_mode (bitsize, bitpos,
3966 MEM_ALIGN (str_rtx), str_mode, 0);
3967 if (str_mode == VOIDmode)
3968 return false;
3969 str_bitsize = GET_MODE_BITSIZE (str_mode);
3970
3971 offset1 = bitpos;
3972 bitpos %= str_bitsize;
3973 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3974 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3975 }
3976 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3977 return false;
3978
3979 /* If the bit field covers the whole REG/MEM, store_field
3980 will likely generate better code. */
3981 if (bitsize >= str_bitsize)
3982 return false;
3983
3984 /* We can't handle fields split across multiple entities. */
3985 if (bitpos + bitsize > str_bitsize)
3986 return false;
3987
3988 if (BYTES_BIG_ENDIAN)
3989 bitpos = str_bitsize - bitpos - bitsize;
3990
3991 switch (TREE_CODE (src))
3992 {
3993 case PLUS_EXPR:
3994 case MINUS_EXPR:
3995 /* For now, just optimize the case of the topmost bitfield
3996 where we don't need to do any masking and also
3997 1 bit bitfields where xor can be used.
3998 We might win by one instruction for the other bitfields
3999 too if insv/extv instructions aren't used, so that
4000 can be added later. */
4001 if (bitpos + bitsize != str_bitsize
4002 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4003 break;
4004
4005 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4006 value = convert_modes (str_mode,
4007 TYPE_MODE (TREE_TYPE (op1)), value,
4008 TYPE_UNSIGNED (TREE_TYPE (op1)));
4009
4010 /* We may be accessing data outside the field, which means
4011 we can alias adjacent data. */
4012 if (MEM_P (str_rtx))
4013 {
4014 str_rtx = shallow_copy_rtx (str_rtx);
4015 set_mem_alias_set (str_rtx, 0);
4016 set_mem_expr (str_rtx, 0);
4017 }
4018
4019 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4020 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4021 {
4022 value = expand_and (str_mode, value, const1_rtx, NULL);
4023 binop = xor_optab;
4024 }
4025 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4026 build_int_cst (NULL_TREE, bitpos),
4027 NULL_RTX, 1);
4028 result = expand_binop (str_mode, binop, str_rtx,
4029 value, str_rtx, 1, OPTAB_WIDEN);
4030 if (result != str_rtx)
4031 emit_move_insn (str_rtx, result);
4032 return true;
4033
4034 case BIT_IOR_EXPR:
4035 case BIT_XOR_EXPR:
4036 if (TREE_CODE (op1) != INTEGER_CST)
4037 break;
4038 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4039 value = convert_modes (GET_MODE (str_rtx),
4040 TYPE_MODE (TREE_TYPE (op1)), value,
4041 TYPE_UNSIGNED (TREE_TYPE (op1)));
4042
4043 /* We may be accessing data outside the field, which means
4044 we can alias adjacent data. */
4045 if (MEM_P (str_rtx))
4046 {
4047 str_rtx = shallow_copy_rtx (str_rtx);
4048 set_mem_alias_set (str_rtx, 0);
4049 set_mem_expr (str_rtx, 0);
4050 }
4051
4052 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4053 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4054 {
4055 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4056 - 1);
4057 value = expand_and (GET_MODE (str_rtx), value, mask,
4058 NULL_RTX);
4059 }
4060 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4061 build_int_cst (NULL_TREE, bitpos),
4062 NULL_RTX, 1);
4063 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4064 value, str_rtx, 1, OPTAB_WIDEN);
4065 if (result != str_rtx)
4066 emit_move_insn (str_rtx, result);
4067 return true;
4068
4069 default:
4070 break;
4071 }
4072
4073 return false;
4074 }
4075
4076
4077 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4078 is true, try generating a nontemporal store. */
4079
4080 void
4081 expand_assignment (tree to, tree from, bool nontemporal)
4082 {
4083 rtx to_rtx = 0;
4084 rtx result;
4085
4086 /* Don't crash if the lhs of the assignment was erroneous. */
4087 if (TREE_CODE (to) == ERROR_MARK)
4088 {
4089 result = expand_normal (from);
4090 return;
4091 }
4092
4093 /* Optimize away no-op moves without side-effects. */
4094 if (operand_equal_p (to, from, 0))
4095 return;
4096
4097 /* Assignment of a structure component needs special treatment
4098 if the structure component's rtx is not simply a MEM.
4099 Assignment of an array element at a constant index, and assignment of
4100 an array element in an unaligned packed structure field, has the same
4101 problem. */
4102 if (handled_component_p (to)
4103 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4104 {
4105 enum machine_mode mode1;
4106 HOST_WIDE_INT bitsize, bitpos;
4107 tree offset;
4108 int unsignedp;
4109 int volatilep = 0;
4110 tree tem;
4111
4112 push_temp_slots ();
4113 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4114 &unsignedp, &volatilep, true);
4115
4116 /* If we are going to use store_bit_field and extract_bit_field,
4117 make sure to_rtx will be safe for multiple use. */
4118
4119 to_rtx = expand_normal (tem);
4120
4121 if (offset != 0)
4122 {
4123 rtx offset_rtx;
4124
4125 if (!MEM_P (to_rtx))
4126 {
4127 /* We can get constant negative offsets into arrays with broken
4128 user code. Translate this to a trap instead of ICEing. */
4129 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4130 expand_builtin_trap ();
4131 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4132 }
4133
4134 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4135 #ifdef POINTERS_EXTEND_UNSIGNED
4136 if (GET_MODE (offset_rtx) != Pmode)
4137 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4138 #else
4139 if (GET_MODE (offset_rtx) != ptr_mode)
4140 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4141 #endif
4142
4143 /* A constant address in TO_RTX can have VOIDmode, we must not try
4144 to call force_reg for that case. Avoid that case. */
4145 if (MEM_P (to_rtx)
4146 && GET_MODE (to_rtx) == BLKmode
4147 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4148 && bitsize > 0
4149 && (bitpos % bitsize) == 0
4150 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4151 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4152 {
4153 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4154 bitpos = 0;
4155 }
4156
4157 to_rtx = offset_address (to_rtx, offset_rtx,
4158 highest_pow2_factor_for_target (to,
4159 offset));
4160 }
4161
4162 /* Handle expand_expr of a complex value returning a CONCAT. */
4163 if (GET_CODE (to_rtx) == CONCAT)
4164 {
4165 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4166 {
4167 gcc_assert (bitpos == 0);
4168 result = store_expr (from, to_rtx, false, nontemporal);
4169 }
4170 else
4171 {
4172 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4173 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4174 nontemporal);
4175 }
4176 }
4177 else
4178 {
4179 if (MEM_P (to_rtx))
4180 {
4181 /* If the field is at offset zero, we could have been given the
4182 DECL_RTX of the parent struct. Don't munge it. */
4183 to_rtx = shallow_copy_rtx (to_rtx);
4184
4185 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4186
4187 /* Deal with volatile and readonly fields. The former is only
4188 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4189 if (volatilep)
4190 MEM_VOLATILE_P (to_rtx) = 1;
4191 if (component_uses_parent_alias_set (to))
4192 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4193 }
4194
4195 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4196 to_rtx, to, from))
4197 result = NULL;
4198 else
4199 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4200 TREE_TYPE (tem), get_alias_set (to),
4201 nontemporal);
4202 }
4203
4204 if (result)
4205 preserve_temp_slots (result);
4206 free_temp_slots ();
4207 pop_temp_slots ();
4208 return;
4209 }
4210
4211 /* If the rhs is a function call and its value is not an aggregate,
4212 call the function before we start to compute the lhs.
4213 This is needed for correct code for cases such as
4214 val = setjmp (buf) on machines where reference to val
4215 requires loading up part of an address in a separate insn.
4216
4217 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4218 since it might be a promoted variable where the zero- or sign- extension
4219 needs to be done. Handling this in the normal way is safe because no
4220 computation is done before the call. */
4221 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4222 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4223 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4224 && REG_P (DECL_RTL (to))))
4225 {
4226 rtx value;
4227
4228 push_temp_slots ();
4229 value = expand_normal (from);
4230 if (to_rtx == 0)
4231 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4232
4233 /* Handle calls that return values in multiple non-contiguous locations.
4234 The Irix 6 ABI has examples of this. */
4235 if (GET_CODE (to_rtx) == PARALLEL)
4236 emit_group_load (to_rtx, value, TREE_TYPE (from),
4237 int_size_in_bytes (TREE_TYPE (from)));
4238 else if (GET_MODE (to_rtx) == BLKmode)
4239 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4240 else
4241 {
4242 if (POINTER_TYPE_P (TREE_TYPE (to)))
4243 value = convert_memory_address (GET_MODE (to_rtx), value);
4244 emit_move_insn (to_rtx, value);
4245 }
4246 preserve_temp_slots (to_rtx);
4247 free_temp_slots ();
4248 pop_temp_slots ();
4249 return;
4250 }
4251
4252 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4253 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4254
4255 if (to_rtx == 0)
4256 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4257
4258 /* Don't move directly into a return register. */
4259 if (TREE_CODE (to) == RESULT_DECL
4260 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4261 {
4262 rtx temp;
4263
4264 push_temp_slots ();
4265 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4266
4267 if (GET_CODE (to_rtx) == PARALLEL)
4268 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4269 int_size_in_bytes (TREE_TYPE (from)));
4270 else
4271 emit_move_insn (to_rtx, temp);
4272
4273 preserve_temp_slots (to_rtx);
4274 free_temp_slots ();
4275 pop_temp_slots ();
4276 return;
4277 }
4278
4279 /* In case we are returning the contents of an object which overlaps
4280 the place the value is being stored, use a safe function when copying
4281 a value through a pointer into a structure value return block. */
4282 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4283 && current_function_returns_struct
4284 && !current_function_returns_pcc_struct)
4285 {
4286 rtx from_rtx, size;
4287
4288 push_temp_slots ();
4289 size = expr_size (from);
4290 from_rtx = expand_normal (from);
4291
4292 emit_library_call (memmove_libfunc, LCT_NORMAL,
4293 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4294 XEXP (from_rtx, 0), Pmode,
4295 convert_to_mode (TYPE_MODE (sizetype),
4296 size, TYPE_UNSIGNED (sizetype)),
4297 TYPE_MODE (sizetype));
4298
4299 preserve_temp_slots (to_rtx);
4300 free_temp_slots ();
4301 pop_temp_slots ();
4302 return;
4303 }
4304
4305 /* Compute FROM and store the value in the rtx we got. */
4306
4307 push_temp_slots ();
4308 result = store_expr (from, to_rtx, 0, nontemporal);
4309 preserve_temp_slots (result);
4310 free_temp_slots ();
4311 pop_temp_slots ();
4312 return;
4313 }
4314
4315 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4316 succeeded, false otherwise. */
4317
4318 static bool
4319 emit_storent_insn (rtx to, rtx from)
4320 {
4321 enum machine_mode mode = GET_MODE (to), imode;
4322 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4323 rtx pattern;
4324
4325 if (code == CODE_FOR_nothing)
4326 return false;
4327
4328 imode = insn_data[code].operand[0].mode;
4329 if (!insn_data[code].operand[0].predicate (to, imode))
4330 return false;
4331
4332 imode = insn_data[code].operand[1].mode;
4333 if (!insn_data[code].operand[1].predicate (from, imode))
4334 {
4335 from = copy_to_mode_reg (imode, from);
4336 if (!insn_data[code].operand[1].predicate (from, imode))
4337 return false;
4338 }
4339
4340 pattern = GEN_FCN (code) (to, from);
4341 if (pattern == NULL_RTX)
4342 return false;
4343
4344 emit_insn (pattern);
4345 return true;
4346 }
4347
4348 /* Generate code for computing expression EXP,
4349 and storing the value into TARGET.
4350
4351 If the mode is BLKmode then we may return TARGET itself.
4352 It turns out that in BLKmode it doesn't cause a problem.
4353 because C has no operators that could combine two different
4354 assignments into the same BLKmode object with different values
4355 with no sequence point. Will other languages need this to
4356 be more thorough?
4357
4358 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4359 stack, and block moves may need to be treated specially.
4360
4361 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4362
4363 rtx
4364 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4365 {
4366 rtx temp;
4367 rtx alt_rtl = NULL_RTX;
4368 int dont_return_target = 0;
4369
4370 if (VOID_TYPE_P (TREE_TYPE (exp)))
4371 {
4372 /* C++ can generate ?: expressions with a throw expression in one
4373 branch and an rvalue in the other. Here, we resolve attempts to
4374 store the throw expression's nonexistent result. */
4375 gcc_assert (!call_param_p);
4376 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4377 return NULL_RTX;
4378 }
4379 if (TREE_CODE (exp) == COMPOUND_EXPR)
4380 {
4381 /* Perform first part of compound expression, then assign from second
4382 part. */
4383 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4384 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4385 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4386 nontemporal);
4387 }
4388 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4389 {
4390 /* For conditional expression, get safe form of the target. Then
4391 test the condition, doing the appropriate assignment on either
4392 side. This avoids the creation of unnecessary temporaries.
4393 For non-BLKmode, it is more efficient not to do this. */
4394
4395 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4396
4397 do_pending_stack_adjust ();
4398 NO_DEFER_POP;
4399 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4400 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4401 nontemporal);
4402 emit_jump_insn (gen_jump (lab2));
4403 emit_barrier ();
4404 emit_label (lab1);
4405 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4406 nontemporal);
4407 emit_label (lab2);
4408 OK_DEFER_POP;
4409
4410 return NULL_RTX;
4411 }
4412 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4413 /* If this is a scalar in a register that is stored in a wider mode
4414 than the declared mode, compute the result into its declared mode
4415 and then convert to the wider mode. Our value is the computed
4416 expression. */
4417 {
4418 rtx inner_target = 0;
4419
4420 /* We can do the conversion inside EXP, which will often result
4421 in some optimizations. Do the conversion in two steps: first
4422 change the signedness, if needed, then the extend. But don't
4423 do this if the type of EXP is a subtype of something else
4424 since then the conversion might involve more than just
4425 converting modes. */
4426 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4427 && TREE_TYPE (TREE_TYPE (exp)) == 0
4428 && (!lang_hooks.reduce_bit_field_operations
4429 || (GET_MODE_PRECISION (GET_MODE (target))
4430 == TYPE_PRECISION (TREE_TYPE (exp)))))
4431 {
4432 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4433 != SUBREG_PROMOTED_UNSIGNED_P (target))
4434 {
4435 /* Some types, e.g. Fortran's logical*4, won't have a signed
4436 version, so use the mode instead. */
4437 tree ntype
4438 = (signed_or_unsigned_type_for
4439 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4440 if (ntype == NULL)
4441 ntype = lang_hooks.types.type_for_mode
4442 (TYPE_MODE (TREE_TYPE (exp)),
4443 SUBREG_PROMOTED_UNSIGNED_P (target));
4444
4445 exp = fold_convert (ntype, exp);
4446 }
4447
4448 exp = fold_convert (lang_hooks.types.type_for_mode
4449 (GET_MODE (SUBREG_REG (target)),
4450 SUBREG_PROMOTED_UNSIGNED_P (target)),
4451 exp);
4452
4453 inner_target = SUBREG_REG (target);
4454 }
4455
4456 temp = expand_expr (exp, inner_target, VOIDmode,
4457 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4458
4459 /* If TEMP is a VOIDmode constant, use convert_modes to make
4460 sure that we properly convert it. */
4461 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4462 {
4463 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4464 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4465 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4466 GET_MODE (target), temp,
4467 SUBREG_PROMOTED_UNSIGNED_P (target));
4468 }
4469
4470 convert_move (SUBREG_REG (target), temp,
4471 SUBREG_PROMOTED_UNSIGNED_P (target));
4472
4473 return NULL_RTX;
4474 }
4475 else
4476 {
4477 rtx tmp_target;
4478
4479 /* If we want to use a nontemporal store, force the value to
4480 register first. */
4481 tmp_target = nontemporal ? NULL_RTX : target;
4482 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4483 (call_param_p
4484 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4485 &alt_rtl);
4486 /* Return TARGET if it's a specified hardware register.
4487 If TARGET is a volatile mem ref, either return TARGET
4488 or return a reg copied *from* TARGET; ANSI requires this.
4489
4490 Otherwise, if TEMP is not TARGET, return TEMP
4491 if it is constant (for efficiency),
4492 or if we really want the correct value. */
4493 if (!(target && REG_P (target)
4494 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4495 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4496 && ! rtx_equal_p (temp, target)
4497 && CONSTANT_P (temp))
4498 dont_return_target = 1;
4499 }
4500
4501 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4502 the same as that of TARGET, adjust the constant. This is needed, for
4503 example, in case it is a CONST_DOUBLE and we want only a word-sized
4504 value. */
4505 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4506 && TREE_CODE (exp) != ERROR_MARK
4507 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4508 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4509 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4510
4511 /* If value was not generated in the target, store it there.
4512 Convert the value to TARGET's type first if necessary and emit the
4513 pending incrementations that have been queued when expanding EXP.
4514 Note that we cannot emit the whole queue blindly because this will
4515 effectively disable the POST_INC optimization later.
4516
4517 If TEMP and TARGET compare equal according to rtx_equal_p, but
4518 one or both of them are volatile memory refs, we have to distinguish
4519 two cases:
4520 - expand_expr has used TARGET. In this case, we must not generate
4521 another copy. This can be detected by TARGET being equal according
4522 to == .
4523 - expand_expr has not used TARGET - that means that the source just
4524 happens to have the same RTX form. Since temp will have been created
4525 by expand_expr, it will compare unequal according to == .
4526 We must generate a copy in this case, to reach the correct number
4527 of volatile memory references. */
4528
4529 if ((! rtx_equal_p (temp, target)
4530 || (temp != target && (side_effects_p (temp)
4531 || side_effects_p (target))))
4532 && TREE_CODE (exp) != ERROR_MARK
4533 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4534 but TARGET is not valid memory reference, TEMP will differ
4535 from TARGET although it is really the same location. */
4536 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4537 /* If there's nothing to copy, don't bother. Don't call
4538 expr_size unless necessary, because some front-ends (C++)
4539 expr_size-hook must not be given objects that are not
4540 supposed to be bit-copied or bit-initialized. */
4541 && expr_size (exp) != const0_rtx)
4542 {
4543 if (GET_MODE (temp) != GET_MODE (target)
4544 && GET_MODE (temp) != VOIDmode)
4545 {
4546 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4547 if (dont_return_target)
4548 {
4549 /* In this case, we will return TEMP,
4550 so make sure it has the proper mode.
4551 But don't forget to store the value into TARGET. */
4552 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4553 emit_move_insn (target, temp);
4554 }
4555 else if (GET_MODE (target) == BLKmode)
4556 emit_block_move (target, temp, expr_size (exp),
4557 (call_param_p
4558 ? BLOCK_OP_CALL_PARM
4559 : BLOCK_OP_NORMAL));
4560 else
4561 convert_move (target, temp, unsignedp);
4562 }
4563
4564 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4565 {
4566 /* Handle copying a string constant into an array. The string
4567 constant may be shorter than the array. So copy just the string's
4568 actual length, and clear the rest. First get the size of the data
4569 type of the string, which is actually the size of the target. */
4570 rtx size = expr_size (exp);
4571
4572 if (GET_CODE (size) == CONST_INT
4573 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4574 emit_block_move (target, temp, size,
4575 (call_param_p
4576 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4577 else
4578 {
4579 /* Compute the size of the data to copy from the string. */
4580 tree copy_size
4581 = size_binop (MIN_EXPR,
4582 make_tree (sizetype, size),
4583 size_int (TREE_STRING_LENGTH (exp)));
4584 rtx copy_size_rtx
4585 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4586 (call_param_p
4587 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4588 rtx label = 0;
4589
4590 /* Copy that much. */
4591 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4592 TYPE_UNSIGNED (sizetype));
4593 emit_block_move (target, temp, copy_size_rtx,
4594 (call_param_p
4595 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4596
4597 /* Figure out how much is left in TARGET that we have to clear.
4598 Do all calculations in ptr_mode. */
4599 if (GET_CODE (copy_size_rtx) == CONST_INT)
4600 {
4601 size = plus_constant (size, -INTVAL (copy_size_rtx));
4602 target = adjust_address (target, BLKmode,
4603 INTVAL (copy_size_rtx));
4604 }
4605 else
4606 {
4607 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4608 copy_size_rtx, NULL_RTX, 0,
4609 OPTAB_LIB_WIDEN);
4610
4611 #ifdef POINTERS_EXTEND_UNSIGNED
4612 if (GET_MODE (copy_size_rtx) != Pmode)
4613 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4614 TYPE_UNSIGNED (sizetype));
4615 #endif
4616
4617 target = offset_address (target, copy_size_rtx,
4618 highest_pow2_factor (copy_size));
4619 label = gen_label_rtx ();
4620 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4621 GET_MODE (size), 0, label);
4622 }
4623
4624 if (size != const0_rtx)
4625 clear_storage (target, size, BLOCK_OP_NORMAL);
4626
4627 if (label)
4628 emit_label (label);
4629 }
4630 }
4631 /* Handle calls that return values in multiple non-contiguous locations.
4632 The Irix 6 ABI has examples of this. */
4633 else if (GET_CODE (target) == PARALLEL)
4634 emit_group_load (target, temp, TREE_TYPE (exp),
4635 int_size_in_bytes (TREE_TYPE (exp)));
4636 else if (GET_MODE (temp) == BLKmode)
4637 emit_block_move (target, temp, expr_size (exp),
4638 (call_param_p
4639 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4640 else if (nontemporal
4641 && emit_storent_insn (target, temp))
4642 /* If we managed to emit a nontemporal store, there is nothing else to
4643 do. */
4644 ;
4645 else
4646 {
4647 temp = force_operand (temp, target);
4648 if (temp != target)
4649 emit_move_insn (target, temp);
4650 }
4651 }
4652
4653 return NULL_RTX;
4654 }
4655 \f
4656 /* Helper for categorize_ctor_elements. Identical interface. */
4657
4658 static bool
4659 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4660 HOST_WIDE_INT *p_elt_count,
4661 bool *p_must_clear)
4662 {
4663 unsigned HOST_WIDE_INT idx;
4664 HOST_WIDE_INT nz_elts, elt_count;
4665 tree value, purpose;
4666
4667 /* Whether CTOR is a valid constant initializer, in accordance with what
4668 initializer_constant_valid_p does. If inferred from the constructor
4669 elements, true until proven otherwise. */
4670 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4671 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4672
4673 nz_elts = 0;
4674 elt_count = 0;
4675
4676 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4677 {
4678 HOST_WIDE_INT mult;
4679
4680 mult = 1;
4681 if (TREE_CODE (purpose) == RANGE_EXPR)
4682 {
4683 tree lo_index = TREE_OPERAND (purpose, 0);
4684 tree hi_index = TREE_OPERAND (purpose, 1);
4685
4686 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4687 mult = (tree_low_cst (hi_index, 1)
4688 - tree_low_cst (lo_index, 1) + 1);
4689 }
4690
4691 switch (TREE_CODE (value))
4692 {
4693 case CONSTRUCTOR:
4694 {
4695 HOST_WIDE_INT nz = 0, ic = 0;
4696
4697 bool const_elt_p
4698 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4699
4700 nz_elts += mult * nz;
4701 elt_count += mult * ic;
4702
4703 if (const_from_elts_p && const_p)
4704 const_p = const_elt_p;
4705 }
4706 break;
4707
4708 case INTEGER_CST:
4709 case REAL_CST:
4710 if (!initializer_zerop (value))
4711 nz_elts += mult;
4712 elt_count += mult;
4713 break;
4714
4715 case STRING_CST:
4716 nz_elts += mult * TREE_STRING_LENGTH (value);
4717 elt_count += mult * TREE_STRING_LENGTH (value);
4718 break;
4719
4720 case COMPLEX_CST:
4721 if (!initializer_zerop (TREE_REALPART (value)))
4722 nz_elts += mult;
4723 if (!initializer_zerop (TREE_IMAGPART (value)))
4724 nz_elts += mult;
4725 elt_count += mult;
4726 break;
4727
4728 case VECTOR_CST:
4729 {
4730 tree v;
4731 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4732 {
4733 if (!initializer_zerop (TREE_VALUE (v)))
4734 nz_elts += mult;
4735 elt_count += mult;
4736 }
4737 }
4738 break;
4739
4740 default:
4741 nz_elts += mult;
4742 elt_count += mult;
4743
4744 if (const_from_elts_p && const_p)
4745 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4746 != NULL_TREE;
4747 break;
4748 }
4749 }
4750
4751 if (!*p_must_clear
4752 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4753 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4754 {
4755 tree init_sub_type;
4756 bool clear_this = true;
4757
4758 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4759 {
4760 /* We don't expect more than one element of the union to be
4761 initialized. Not sure what we should do otherwise... */
4762 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4763 == 1);
4764
4765 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4766 CONSTRUCTOR_ELTS (ctor),
4767 0)->value);
4768
4769 /* ??? We could look at each element of the union, and find the
4770 largest element. Which would avoid comparing the size of the
4771 initialized element against any tail padding in the union.
4772 Doesn't seem worth the effort... */
4773 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4774 TYPE_SIZE (init_sub_type)) == 1)
4775 {
4776 /* And now we have to find out if the element itself is fully
4777 constructed. E.g. for union { struct { int a, b; } s; } u
4778 = { .s = { .a = 1 } }. */
4779 if (elt_count == count_type_elements (init_sub_type, false))
4780 clear_this = false;
4781 }
4782 }
4783
4784 *p_must_clear = clear_this;
4785 }
4786
4787 *p_nz_elts += nz_elts;
4788 *p_elt_count += elt_count;
4789
4790 return const_p;
4791 }
4792
4793 /* Examine CTOR to discover:
4794 * how many scalar fields are set to nonzero values,
4795 and place it in *P_NZ_ELTS;
4796 * how many scalar fields in total are in CTOR,
4797 and place it in *P_ELT_COUNT.
4798 * if a type is a union, and the initializer from the constructor
4799 is not the largest element in the union, then set *p_must_clear.
4800
4801 Return whether or not CTOR is a valid static constant initializer, the same
4802 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4803
4804 bool
4805 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4806 HOST_WIDE_INT *p_elt_count,
4807 bool *p_must_clear)
4808 {
4809 *p_nz_elts = 0;
4810 *p_elt_count = 0;
4811 *p_must_clear = false;
4812
4813 return
4814 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4815 }
4816
4817 /* Count the number of scalars in TYPE. Return -1 on overflow or
4818 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4819 array member at the end of the structure. */
4820
4821 HOST_WIDE_INT
4822 count_type_elements (const_tree type, bool allow_flexarr)
4823 {
4824 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4825 switch (TREE_CODE (type))
4826 {
4827 case ARRAY_TYPE:
4828 {
4829 tree telts = array_type_nelts (type);
4830 if (telts && host_integerp (telts, 1))
4831 {
4832 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4833 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4834 if (n == 0)
4835 return 0;
4836 else if (max / n > m)
4837 return n * m;
4838 }
4839 return -1;
4840 }
4841
4842 case RECORD_TYPE:
4843 {
4844 HOST_WIDE_INT n = 0, t;
4845 tree f;
4846
4847 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4848 if (TREE_CODE (f) == FIELD_DECL)
4849 {
4850 t = count_type_elements (TREE_TYPE (f), false);
4851 if (t < 0)
4852 {
4853 /* Check for structures with flexible array member. */
4854 tree tf = TREE_TYPE (f);
4855 if (allow_flexarr
4856 && TREE_CHAIN (f) == NULL
4857 && TREE_CODE (tf) == ARRAY_TYPE
4858 && TYPE_DOMAIN (tf)
4859 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4860 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4861 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4862 && int_size_in_bytes (type) >= 0)
4863 break;
4864
4865 return -1;
4866 }
4867 n += t;
4868 }
4869
4870 return n;
4871 }
4872
4873 case UNION_TYPE:
4874 case QUAL_UNION_TYPE:
4875 {
4876 /* Ho hum. How in the world do we guess here? Clearly it isn't
4877 right to count the fields. Guess based on the number of words. */
4878 HOST_WIDE_INT n = int_size_in_bytes (type);
4879 if (n < 0)
4880 return -1;
4881 return n / UNITS_PER_WORD;
4882 }
4883
4884 case COMPLEX_TYPE:
4885 return 2;
4886
4887 case VECTOR_TYPE:
4888 return TYPE_VECTOR_SUBPARTS (type);
4889
4890 case INTEGER_TYPE:
4891 case REAL_TYPE:
4892 case ENUMERAL_TYPE:
4893 case BOOLEAN_TYPE:
4894 case POINTER_TYPE:
4895 case OFFSET_TYPE:
4896 case REFERENCE_TYPE:
4897 return 1;
4898
4899 case VOID_TYPE:
4900 case METHOD_TYPE:
4901 case FUNCTION_TYPE:
4902 case LANG_TYPE:
4903 default:
4904 gcc_unreachable ();
4905 }
4906 }
4907
4908 /* Return 1 if EXP contains mostly (3/4) zeros. */
4909
4910 static int
4911 mostly_zeros_p (tree exp)
4912 {
4913 if (TREE_CODE (exp) == CONSTRUCTOR)
4914
4915 {
4916 HOST_WIDE_INT nz_elts, count, elts;
4917 bool must_clear;
4918
4919 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4920 if (must_clear)
4921 return 1;
4922
4923 elts = count_type_elements (TREE_TYPE (exp), false);
4924
4925 return nz_elts < elts / 4;
4926 }
4927
4928 return initializer_zerop (exp);
4929 }
4930
4931 /* Return 1 if EXP contains all zeros. */
4932
4933 static int
4934 all_zeros_p (tree exp)
4935 {
4936 if (TREE_CODE (exp) == CONSTRUCTOR)
4937
4938 {
4939 HOST_WIDE_INT nz_elts, count;
4940 bool must_clear;
4941
4942 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4943 return nz_elts == 0;
4944 }
4945
4946 return initializer_zerop (exp);
4947 }
4948 \f
4949 /* Helper function for store_constructor.
4950 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4951 TYPE is the type of the CONSTRUCTOR, not the element type.
4952 CLEARED is as for store_constructor.
4953 ALIAS_SET is the alias set to use for any stores.
4954
4955 This provides a recursive shortcut back to store_constructor when it isn't
4956 necessary to go through store_field. This is so that we can pass through
4957 the cleared field to let store_constructor know that we may not have to
4958 clear a substructure if the outer structure has already been cleared. */
4959
4960 static void
4961 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4962 HOST_WIDE_INT bitpos, enum machine_mode mode,
4963 tree exp, tree type, int cleared,
4964 alias_set_type alias_set)
4965 {
4966 if (TREE_CODE (exp) == CONSTRUCTOR
4967 /* We can only call store_constructor recursively if the size and
4968 bit position are on a byte boundary. */
4969 && bitpos % BITS_PER_UNIT == 0
4970 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4971 /* If we have a nonzero bitpos for a register target, then we just
4972 let store_field do the bitfield handling. This is unlikely to
4973 generate unnecessary clear instructions anyways. */
4974 && (bitpos == 0 || MEM_P (target)))
4975 {
4976 if (MEM_P (target))
4977 target
4978 = adjust_address (target,
4979 GET_MODE (target) == BLKmode
4980 || 0 != (bitpos
4981 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4982 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4983
4984
4985 /* Update the alias set, if required. */
4986 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4987 && MEM_ALIAS_SET (target) != 0)
4988 {
4989 target = copy_rtx (target);
4990 set_mem_alias_set (target, alias_set);
4991 }
4992
4993 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4994 }
4995 else
4996 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
4997 }
4998
4999 /* Store the value of constructor EXP into the rtx TARGET.
5000 TARGET is either a REG or a MEM; we know it cannot conflict, since
5001 safe_from_p has been called.
5002 CLEARED is true if TARGET is known to have been zero'd.
5003 SIZE is the number of bytes of TARGET we are allowed to modify: this
5004 may not be the same as the size of EXP if we are assigning to a field
5005 which has been packed to exclude padding bits. */
5006
5007 static void
5008 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5009 {
5010 tree type = TREE_TYPE (exp);
5011 #ifdef WORD_REGISTER_OPERATIONS
5012 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5013 #endif
5014
5015 switch (TREE_CODE (type))
5016 {
5017 case RECORD_TYPE:
5018 case UNION_TYPE:
5019 case QUAL_UNION_TYPE:
5020 {
5021 unsigned HOST_WIDE_INT idx;
5022 tree field, value;
5023
5024 /* If size is zero or the target is already cleared, do nothing. */
5025 if (size == 0 || cleared)
5026 cleared = 1;
5027 /* We either clear the aggregate or indicate the value is dead. */
5028 else if ((TREE_CODE (type) == UNION_TYPE
5029 || TREE_CODE (type) == QUAL_UNION_TYPE)
5030 && ! CONSTRUCTOR_ELTS (exp))
5031 /* If the constructor is empty, clear the union. */
5032 {
5033 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5034 cleared = 1;
5035 }
5036
5037 /* If we are building a static constructor into a register,
5038 set the initial value as zero so we can fold the value into
5039 a constant. But if more than one register is involved,
5040 this probably loses. */
5041 else if (REG_P (target) && TREE_STATIC (exp)
5042 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5043 {
5044 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5045 cleared = 1;
5046 }
5047
5048 /* If the constructor has fewer fields than the structure or
5049 if we are initializing the structure to mostly zeros, clear
5050 the whole structure first. Don't do this if TARGET is a
5051 register whose mode size isn't equal to SIZE since
5052 clear_storage can't handle this case. */
5053 else if (size > 0
5054 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5055 != fields_length (type))
5056 || mostly_zeros_p (exp))
5057 && (!REG_P (target)
5058 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5059 == size)))
5060 {
5061 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5062 cleared = 1;
5063 }
5064
5065 if (REG_P (target) && !cleared)
5066 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5067
5068 /* Store each element of the constructor into the
5069 corresponding field of TARGET. */
5070 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5071 {
5072 enum machine_mode mode;
5073 HOST_WIDE_INT bitsize;
5074 HOST_WIDE_INT bitpos = 0;
5075 tree offset;
5076 rtx to_rtx = target;
5077
5078 /* Just ignore missing fields. We cleared the whole
5079 structure, above, if any fields are missing. */
5080 if (field == 0)
5081 continue;
5082
5083 if (cleared && initializer_zerop (value))
5084 continue;
5085
5086 if (host_integerp (DECL_SIZE (field), 1))
5087 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5088 else
5089 bitsize = -1;
5090
5091 mode = DECL_MODE (field);
5092 if (DECL_BIT_FIELD (field))
5093 mode = VOIDmode;
5094
5095 offset = DECL_FIELD_OFFSET (field);
5096 if (host_integerp (offset, 0)
5097 && host_integerp (bit_position (field), 0))
5098 {
5099 bitpos = int_bit_position (field);
5100 offset = 0;
5101 }
5102 else
5103 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5104
5105 if (offset)
5106 {
5107 rtx offset_rtx;
5108
5109 offset
5110 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5111 make_tree (TREE_TYPE (exp),
5112 target));
5113
5114 offset_rtx = expand_normal (offset);
5115 gcc_assert (MEM_P (to_rtx));
5116
5117 #ifdef POINTERS_EXTEND_UNSIGNED
5118 if (GET_MODE (offset_rtx) != Pmode)
5119 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5120 #else
5121 if (GET_MODE (offset_rtx) != ptr_mode)
5122 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5123 #endif
5124
5125 to_rtx = offset_address (to_rtx, offset_rtx,
5126 highest_pow2_factor (offset));
5127 }
5128
5129 #ifdef WORD_REGISTER_OPERATIONS
5130 /* If this initializes a field that is smaller than a
5131 word, at the start of a word, try to widen it to a full
5132 word. This special case allows us to output C++ member
5133 function initializations in a form that the optimizers
5134 can understand. */
5135 if (REG_P (target)
5136 && bitsize < BITS_PER_WORD
5137 && bitpos % BITS_PER_WORD == 0
5138 && GET_MODE_CLASS (mode) == MODE_INT
5139 && TREE_CODE (value) == INTEGER_CST
5140 && exp_size >= 0
5141 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5142 {
5143 tree type = TREE_TYPE (value);
5144
5145 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5146 {
5147 type = lang_hooks.types.type_for_size
5148 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5149 value = fold_convert (type, value);
5150 }
5151
5152 if (BYTES_BIG_ENDIAN)
5153 value
5154 = fold_build2 (LSHIFT_EXPR, type, value,
5155 build_int_cst (type,
5156 BITS_PER_WORD - bitsize));
5157 bitsize = BITS_PER_WORD;
5158 mode = word_mode;
5159 }
5160 #endif
5161
5162 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5163 && DECL_NONADDRESSABLE_P (field))
5164 {
5165 to_rtx = copy_rtx (to_rtx);
5166 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5167 }
5168
5169 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5170 value, type, cleared,
5171 get_alias_set (TREE_TYPE (field)));
5172 }
5173 break;
5174 }
5175 case ARRAY_TYPE:
5176 {
5177 tree value, index;
5178 unsigned HOST_WIDE_INT i;
5179 int need_to_clear;
5180 tree domain;
5181 tree elttype = TREE_TYPE (type);
5182 int const_bounds_p;
5183 HOST_WIDE_INT minelt = 0;
5184 HOST_WIDE_INT maxelt = 0;
5185
5186 domain = TYPE_DOMAIN (type);
5187 const_bounds_p = (TYPE_MIN_VALUE (domain)
5188 && TYPE_MAX_VALUE (domain)
5189 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5190 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5191
5192 /* If we have constant bounds for the range of the type, get them. */
5193 if (const_bounds_p)
5194 {
5195 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5196 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5197 }
5198
5199 /* If the constructor has fewer elements than the array, clear
5200 the whole array first. Similarly if this is static
5201 constructor of a non-BLKmode object. */
5202 if (cleared)
5203 need_to_clear = 0;
5204 else if (REG_P (target) && TREE_STATIC (exp))
5205 need_to_clear = 1;
5206 else
5207 {
5208 unsigned HOST_WIDE_INT idx;
5209 tree index, value;
5210 HOST_WIDE_INT count = 0, zero_count = 0;
5211 need_to_clear = ! const_bounds_p;
5212
5213 /* This loop is a more accurate version of the loop in
5214 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5215 is also needed to check for missing elements. */
5216 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5217 {
5218 HOST_WIDE_INT this_node_count;
5219
5220 if (need_to_clear)
5221 break;
5222
5223 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5224 {
5225 tree lo_index = TREE_OPERAND (index, 0);
5226 tree hi_index = TREE_OPERAND (index, 1);
5227
5228 if (! host_integerp (lo_index, 1)
5229 || ! host_integerp (hi_index, 1))
5230 {
5231 need_to_clear = 1;
5232 break;
5233 }
5234
5235 this_node_count = (tree_low_cst (hi_index, 1)
5236 - tree_low_cst (lo_index, 1) + 1);
5237 }
5238 else
5239 this_node_count = 1;
5240
5241 count += this_node_count;
5242 if (mostly_zeros_p (value))
5243 zero_count += this_node_count;
5244 }
5245
5246 /* Clear the entire array first if there are any missing
5247 elements, or if the incidence of zero elements is >=
5248 75%. */
5249 if (! need_to_clear
5250 && (count < maxelt - minelt + 1
5251 || 4 * zero_count >= 3 * count))
5252 need_to_clear = 1;
5253 }
5254
5255 if (need_to_clear && size > 0)
5256 {
5257 if (REG_P (target))
5258 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5259 else
5260 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5261 cleared = 1;
5262 }
5263
5264 if (!cleared && REG_P (target))
5265 /* Inform later passes that the old value is dead. */
5266 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5267
5268 /* Store each element of the constructor into the
5269 corresponding element of TARGET, determined by counting the
5270 elements. */
5271 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5272 {
5273 enum machine_mode mode;
5274 HOST_WIDE_INT bitsize;
5275 HOST_WIDE_INT bitpos;
5276 int unsignedp;
5277 rtx xtarget = target;
5278
5279 if (cleared && initializer_zerop (value))
5280 continue;
5281
5282 unsignedp = TYPE_UNSIGNED (elttype);
5283 mode = TYPE_MODE (elttype);
5284 if (mode == BLKmode)
5285 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5286 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5287 : -1);
5288 else
5289 bitsize = GET_MODE_BITSIZE (mode);
5290
5291 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5292 {
5293 tree lo_index = TREE_OPERAND (index, 0);
5294 tree hi_index = TREE_OPERAND (index, 1);
5295 rtx index_r, pos_rtx;
5296 HOST_WIDE_INT lo, hi, count;
5297 tree position;
5298
5299 /* If the range is constant and "small", unroll the loop. */
5300 if (const_bounds_p
5301 && host_integerp (lo_index, 0)
5302 && host_integerp (hi_index, 0)
5303 && (lo = tree_low_cst (lo_index, 0),
5304 hi = tree_low_cst (hi_index, 0),
5305 count = hi - lo + 1,
5306 (!MEM_P (target)
5307 || count <= 2
5308 || (host_integerp (TYPE_SIZE (elttype), 1)
5309 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5310 <= 40 * 8)))))
5311 {
5312 lo -= minelt; hi -= minelt;
5313 for (; lo <= hi; lo++)
5314 {
5315 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5316
5317 if (MEM_P (target)
5318 && !MEM_KEEP_ALIAS_SET_P (target)
5319 && TREE_CODE (type) == ARRAY_TYPE
5320 && TYPE_NONALIASED_COMPONENT (type))
5321 {
5322 target = copy_rtx (target);
5323 MEM_KEEP_ALIAS_SET_P (target) = 1;
5324 }
5325
5326 store_constructor_field
5327 (target, bitsize, bitpos, mode, value, type, cleared,
5328 get_alias_set (elttype));
5329 }
5330 }
5331 else
5332 {
5333 rtx loop_start = gen_label_rtx ();
5334 rtx loop_end = gen_label_rtx ();
5335 tree exit_cond;
5336
5337 expand_normal (hi_index);
5338 unsignedp = TYPE_UNSIGNED (domain);
5339
5340 index = build_decl (VAR_DECL, NULL_TREE, domain);
5341
5342 index_r
5343 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5344 &unsignedp, 0));
5345 SET_DECL_RTL (index, index_r);
5346 store_expr (lo_index, index_r, 0, false);
5347
5348 /* Build the head of the loop. */
5349 do_pending_stack_adjust ();
5350 emit_label (loop_start);
5351
5352 /* Assign value to element index. */
5353 position =
5354 fold_convert (ssizetype,
5355 fold_build2 (MINUS_EXPR,
5356 TREE_TYPE (index),
5357 index,
5358 TYPE_MIN_VALUE (domain)));
5359
5360 position =
5361 size_binop (MULT_EXPR, position,
5362 fold_convert (ssizetype,
5363 TYPE_SIZE_UNIT (elttype)));
5364
5365 pos_rtx = expand_normal (position);
5366 xtarget = offset_address (target, pos_rtx,
5367 highest_pow2_factor (position));
5368 xtarget = adjust_address (xtarget, mode, 0);
5369 if (TREE_CODE (value) == CONSTRUCTOR)
5370 store_constructor (value, xtarget, cleared,
5371 bitsize / BITS_PER_UNIT);
5372 else
5373 store_expr (value, xtarget, 0, false);
5374
5375 /* Generate a conditional jump to exit the loop. */
5376 exit_cond = build2 (LT_EXPR, integer_type_node,
5377 index, hi_index);
5378 jumpif (exit_cond, loop_end);
5379
5380 /* Update the loop counter, and jump to the head of
5381 the loop. */
5382 expand_assignment (index,
5383 build2 (PLUS_EXPR, TREE_TYPE (index),
5384 index, integer_one_node),
5385 false);
5386
5387 emit_jump (loop_start);
5388
5389 /* Build the end of the loop. */
5390 emit_label (loop_end);
5391 }
5392 }
5393 else if ((index != 0 && ! host_integerp (index, 0))
5394 || ! host_integerp (TYPE_SIZE (elttype), 1))
5395 {
5396 tree position;
5397
5398 if (index == 0)
5399 index = ssize_int (1);
5400
5401 if (minelt)
5402 index = fold_convert (ssizetype,
5403 fold_build2 (MINUS_EXPR,
5404 TREE_TYPE (index),
5405 index,
5406 TYPE_MIN_VALUE (domain)));
5407
5408 position =
5409 size_binop (MULT_EXPR, index,
5410 fold_convert (ssizetype,
5411 TYPE_SIZE_UNIT (elttype)));
5412 xtarget = offset_address (target,
5413 expand_normal (position),
5414 highest_pow2_factor (position));
5415 xtarget = adjust_address (xtarget, mode, 0);
5416 store_expr (value, xtarget, 0, false);
5417 }
5418 else
5419 {
5420 if (index != 0)
5421 bitpos = ((tree_low_cst (index, 0) - minelt)
5422 * tree_low_cst (TYPE_SIZE (elttype), 1));
5423 else
5424 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5425
5426 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5427 && TREE_CODE (type) == ARRAY_TYPE
5428 && TYPE_NONALIASED_COMPONENT (type))
5429 {
5430 target = copy_rtx (target);
5431 MEM_KEEP_ALIAS_SET_P (target) = 1;
5432 }
5433 store_constructor_field (target, bitsize, bitpos, mode, value,
5434 type, cleared, get_alias_set (elttype));
5435 }
5436 }
5437 break;
5438 }
5439
5440 case VECTOR_TYPE:
5441 {
5442 unsigned HOST_WIDE_INT idx;
5443 constructor_elt *ce;
5444 int i;
5445 int need_to_clear;
5446 int icode = 0;
5447 tree elttype = TREE_TYPE (type);
5448 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5449 enum machine_mode eltmode = TYPE_MODE (elttype);
5450 HOST_WIDE_INT bitsize;
5451 HOST_WIDE_INT bitpos;
5452 rtvec vector = NULL;
5453 unsigned n_elts;
5454
5455 gcc_assert (eltmode != BLKmode);
5456
5457 n_elts = TYPE_VECTOR_SUBPARTS (type);
5458 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5459 {
5460 enum machine_mode mode = GET_MODE (target);
5461
5462 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5463 if (icode != CODE_FOR_nothing)
5464 {
5465 unsigned int i;
5466
5467 vector = rtvec_alloc (n_elts);
5468 for (i = 0; i < n_elts; i++)
5469 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5470 }
5471 }
5472
5473 /* If the constructor has fewer elements than the vector,
5474 clear the whole array first. Similarly if this is static
5475 constructor of a non-BLKmode object. */
5476 if (cleared)
5477 need_to_clear = 0;
5478 else if (REG_P (target) && TREE_STATIC (exp))
5479 need_to_clear = 1;
5480 else
5481 {
5482 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5483 tree value;
5484
5485 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5486 {
5487 int n_elts_here = tree_low_cst
5488 (int_const_binop (TRUNC_DIV_EXPR,
5489 TYPE_SIZE (TREE_TYPE (value)),
5490 TYPE_SIZE (elttype), 0), 1);
5491
5492 count += n_elts_here;
5493 if (mostly_zeros_p (value))
5494 zero_count += n_elts_here;
5495 }
5496
5497 /* Clear the entire vector first if there are any missing elements,
5498 or if the incidence of zero elements is >= 75%. */
5499 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5500 }
5501
5502 if (need_to_clear && size > 0 && !vector)
5503 {
5504 if (REG_P (target))
5505 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5506 else
5507 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5508 cleared = 1;
5509 }
5510
5511 /* Inform later passes that the old value is dead. */
5512 if (!cleared && !vector && REG_P (target))
5513 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5514
5515 /* Store each element of the constructor into the corresponding
5516 element of TARGET, determined by counting the elements. */
5517 for (idx = 0, i = 0;
5518 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5519 idx++, i += bitsize / elt_size)
5520 {
5521 HOST_WIDE_INT eltpos;
5522 tree value = ce->value;
5523
5524 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5525 if (cleared && initializer_zerop (value))
5526 continue;
5527
5528 if (ce->index)
5529 eltpos = tree_low_cst (ce->index, 1);
5530 else
5531 eltpos = i;
5532
5533 if (vector)
5534 {
5535 /* Vector CONSTRUCTORs should only be built from smaller
5536 vectors in the case of BLKmode vectors. */
5537 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5538 RTVEC_ELT (vector, eltpos)
5539 = expand_normal (value);
5540 }
5541 else
5542 {
5543 enum machine_mode value_mode =
5544 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5545 ? TYPE_MODE (TREE_TYPE (value))
5546 : eltmode;
5547 bitpos = eltpos * elt_size;
5548 store_constructor_field (target, bitsize, bitpos,
5549 value_mode, value, type,
5550 cleared, get_alias_set (elttype));
5551 }
5552 }
5553
5554 if (vector)
5555 emit_insn (GEN_FCN (icode)
5556 (target,
5557 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5558 break;
5559 }
5560
5561 default:
5562 gcc_unreachable ();
5563 }
5564 }
5565
5566 /* Store the value of EXP (an expression tree)
5567 into a subfield of TARGET which has mode MODE and occupies
5568 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5569 If MODE is VOIDmode, it means that we are storing into a bit-field.
5570
5571 Always return const0_rtx unless we have something particular to
5572 return.
5573
5574 TYPE is the type of the underlying object,
5575
5576 ALIAS_SET is the alias set for the destination. This value will
5577 (in general) be different from that for TARGET, since TARGET is a
5578 reference to the containing structure.
5579
5580 If NONTEMPORAL is true, try generating a nontemporal store. */
5581
5582 static rtx
5583 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5584 enum machine_mode mode, tree exp, tree type,
5585 alias_set_type alias_set, bool nontemporal)
5586 {
5587 HOST_WIDE_INT width_mask = 0;
5588
5589 if (TREE_CODE (exp) == ERROR_MARK)
5590 return const0_rtx;
5591
5592 /* If we have nothing to store, do nothing unless the expression has
5593 side-effects. */
5594 if (bitsize == 0)
5595 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5596 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5597 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5598
5599 /* If we are storing into an unaligned field of an aligned union that is
5600 in a register, we may have the mode of TARGET being an integer mode but
5601 MODE == BLKmode. In that case, get an aligned object whose size and
5602 alignment are the same as TARGET and store TARGET into it (we can avoid
5603 the store if the field being stored is the entire width of TARGET). Then
5604 call ourselves recursively to store the field into a BLKmode version of
5605 that object. Finally, load from the object into TARGET. This is not
5606 very efficient in general, but should only be slightly more expensive
5607 than the otherwise-required unaligned accesses. Perhaps this can be
5608 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5609 twice, once with emit_move_insn and once via store_field. */
5610
5611 if (mode == BLKmode
5612 && (REG_P (target) || GET_CODE (target) == SUBREG))
5613 {
5614 rtx object = assign_temp (type, 0, 1, 1);
5615 rtx blk_object = adjust_address (object, BLKmode, 0);
5616
5617 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5618 emit_move_insn (object, target);
5619
5620 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5621 nontemporal);
5622
5623 emit_move_insn (target, object);
5624
5625 /* We want to return the BLKmode version of the data. */
5626 return blk_object;
5627 }
5628
5629 if (GET_CODE (target) == CONCAT)
5630 {
5631 /* We're storing into a struct containing a single __complex. */
5632
5633 gcc_assert (!bitpos);
5634 return store_expr (exp, target, 0, nontemporal);
5635 }
5636
5637 /* If the structure is in a register or if the component
5638 is a bit field, we cannot use addressing to access it.
5639 Use bit-field techniques or SUBREG to store in it. */
5640
5641 if (mode == VOIDmode
5642 || (mode != BLKmode && ! direct_store[(int) mode]
5643 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5644 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5645 || REG_P (target)
5646 || GET_CODE (target) == SUBREG
5647 /* If the field isn't aligned enough to store as an ordinary memref,
5648 store it as a bit field. */
5649 || (mode != BLKmode
5650 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5651 || bitpos % GET_MODE_ALIGNMENT (mode))
5652 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5653 || (bitpos % BITS_PER_UNIT != 0)))
5654 /* If the RHS and field are a constant size and the size of the
5655 RHS isn't the same size as the bitfield, we must use bitfield
5656 operations. */
5657 || (bitsize >= 0
5658 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5659 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5660 {
5661 rtx temp;
5662
5663 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5664 implies a mask operation. If the precision is the same size as
5665 the field we're storing into, that mask is redundant. This is
5666 particularly common with bit field assignments generated by the
5667 C front end. */
5668 if (TREE_CODE (exp) == NOP_EXPR)
5669 {
5670 tree type = TREE_TYPE (exp);
5671 if (INTEGRAL_TYPE_P (type)
5672 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5673 && bitsize == TYPE_PRECISION (type))
5674 {
5675 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5676 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5677 exp = TREE_OPERAND (exp, 0);
5678 }
5679 }
5680
5681 temp = expand_normal (exp);
5682
5683 /* If BITSIZE is narrower than the size of the type of EXP
5684 we will be narrowing TEMP. Normally, what's wanted are the
5685 low-order bits. However, if EXP's type is a record and this is
5686 big-endian machine, we want the upper BITSIZE bits. */
5687 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5688 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5689 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5690 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5691 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5692 - bitsize),
5693 NULL_RTX, 1);
5694
5695 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5696 MODE. */
5697 if (mode != VOIDmode && mode != BLKmode
5698 && mode != TYPE_MODE (TREE_TYPE (exp)))
5699 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5700
5701 /* If the modes of TARGET and TEMP are both BLKmode, both
5702 must be in memory and BITPOS must be aligned on a byte
5703 boundary. If so, we simply do a block copy. */
5704 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5705 {
5706 gcc_assert (MEM_P (target) && MEM_P (temp)
5707 && !(bitpos % BITS_PER_UNIT));
5708
5709 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5710 emit_block_move (target, temp,
5711 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5712 / BITS_PER_UNIT),
5713 BLOCK_OP_NORMAL);
5714
5715 return const0_rtx;
5716 }
5717
5718 /* Store the value in the bitfield. */
5719 store_bit_field (target, bitsize, bitpos, mode, temp);
5720
5721 return const0_rtx;
5722 }
5723 else
5724 {
5725 /* Now build a reference to just the desired component. */
5726 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5727
5728 if (to_rtx == target)
5729 to_rtx = copy_rtx (to_rtx);
5730
5731 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5732 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5733 set_mem_alias_set (to_rtx, alias_set);
5734
5735 return store_expr (exp, to_rtx, 0, nontemporal);
5736 }
5737 }
5738 \f
5739 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5740 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5741 codes and find the ultimate containing object, which we return.
5742
5743 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5744 bit position, and *PUNSIGNEDP to the signedness of the field.
5745 If the position of the field is variable, we store a tree
5746 giving the variable offset (in units) in *POFFSET.
5747 This offset is in addition to the bit position.
5748 If the position is not variable, we store 0 in *POFFSET.
5749
5750 If any of the extraction expressions is volatile,
5751 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5752
5753 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5754 is a mode that can be used to access the field. In that case, *PBITSIZE
5755 is redundant.
5756
5757 If the field describes a variable-sized object, *PMODE is set to
5758 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5759 this case, but the address of the object can be found.
5760
5761 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5762 look through nodes that serve as markers of a greater alignment than
5763 the one that can be deduced from the expression. These nodes make it
5764 possible for front-ends to prevent temporaries from being created by
5765 the middle-end on alignment considerations. For that purpose, the
5766 normal operating mode at high-level is to always pass FALSE so that
5767 the ultimate containing object is really returned; moreover, the
5768 associated predicate handled_component_p will always return TRUE
5769 on these nodes, thus indicating that they are essentially handled
5770 by get_inner_reference. TRUE should only be passed when the caller
5771 is scanning the expression in order to build another representation
5772 and specifically knows how to handle these nodes; as such, this is
5773 the normal operating mode in the RTL expanders. */
5774
5775 tree
5776 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5777 HOST_WIDE_INT *pbitpos, tree *poffset,
5778 enum machine_mode *pmode, int *punsignedp,
5779 int *pvolatilep, bool keep_aligning)
5780 {
5781 tree size_tree = 0;
5782 enum machine_mode mode = VOIDmode;
5783 tree offset = size_zero_node;
5784 tree bit_offset = bitsize_zero_node;
5785 tree tem;
5786
5787 /* First get the mode, signedness, and size. We do this from just the
5788 outermost expression. */
5789 if (TREE_CODE (exp) == COMPONENT_REF)
5790 {
5791 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5792 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5793 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5794
5795 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5796 }
5797 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5798 {
5799 size_tree = TREE_OPERAND (exp, 1);
5800 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5801
5802 /* For vector types, with the correct size of access, use the mode of
5803 inner type. */
5804 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5805 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5806 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5807 mode = TYPE_MODE (TREE_TYPE (exp));
5808 }
5809 else
5810 {
5811 mode = TYPE_MODE (TREE_TYPE (exp));
5812 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5813
5814 if (mode == BLKmode)
5815 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5816 else
5817 *pbitsize = GET_MODE_BITSIZE (mode);
5818 }
5819
5820 if (size_tree != 0)
5821 {
5822 if (! host_integerp (size_tree, 1))
5823 mode = BLKmode, *pbitsize = -1;
5824 else
5825 *pbitsize = tree_low_cst (size_tree, 1);
5826 }
5827
5828 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5829 and find the ultimate containing object. */
5830 while (1)
5831 {
5832 switch (TREE_CODE (exp))
5833 {
5834 case BIT_FIELD_REF:
5835 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5836 TREE_OPERAND (exp, 2));
5837 break;
5838
5839 case COMPONENT_REF:
5840 {
5841 tree field = TREE_OPERAND (exp, 1);
5842 tree this_offset = component_ref_field_offset (exp);
5843
5844 /* If this field hasn't been filled in yet, don't go past it.
5845 This should only happen when folding expressions made during
5846 type construction. */
5847 if (this_offset == 0)
5848 break;
5849
5850 offset = size_binop (PLUS_EXPR, offset, this_offset);
5851 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5852 DECL_FIELD_BIT_OFFSET (field));
5853
5854 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5855 }
5856 break;
5857
5858 case ARRAY_REF:
5859 case ARRAY_RANGE_REF:
5860 {
5861 tree index = TREE_OPERAND (exp, 1);
5862 tree low_bound = array_ref_low_bound (exp);
5863 tree unit_size = array_ref_element_size (exp);
5864
5865 /* We assume all arrays have sizes that are a multiple of a byte.
5866 First subtract the lower bound, if any, in the type of the
5867 index, then convert to sizetype and multiply by the size of
5868 the array element. */
5869 if (! integer_zerop (low_bound))
5870 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5871 index, low_bound);
5872
5873 offset = size_binop (PLUS_EXPR, offset,
5874 size_binop (MULT_EXPR,
5875 fold_convert (sizetype, index),
5876 unit_size));
5877 }
5878 break;
5879
5880 case REALPART_EXPR:
5881 break;
5882
5883 case IMAGPART_EXPR:
5884 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5885 bitsize_int (*pbitsize));
5886 break;
5887
5888 case VIEW_CONVERT_EXPR:
5889 if (keep_aligning && STRICT_ALIGNMENT
5890 && (TYPE_ALIGN (TREE_TYPE (exp))
5891 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5892 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5893 < BIGGEST_ALIGNMENT)
5894 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5895 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5896 goto done;
5897 break;
5898
5899 default:
5900 goto done;
5901 }
5902
5903 /* If any reference in the chain is volatile, the effect is volatile. */
5904 if (TREE_THIS_VOLATILE (exp))
5905 *pvolatilep = 1;
5906
5907 exp = TREE_OPERAND (exp, 0);
5908 }
5909 done:
5910
5911 /* If OFFSET is constant, see if we can return the whole thing as a
5912 constant bit position. Otherwise, split it up. */
5913 if (host_integerp (offset, 0)
5914 && 0 != (tem = size_binop (MULT_EXPR,
5915 fold_convert (bitsizetype, offset),
5916 bitsize_unit_node))
5917 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5918 && host_integerp (tem, 0))
5919 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5920 else
5921 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5922
5923 *pmode = mode;
5924 return exp;
5925 }
5926
5927 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5928 look for whether EXP or any nested component-refs within EXP is marked
5929 as PACKED. */
5930
5931 bool
5932 contains_packed_reference (const_tree exp)
5933 {
5934 bool packed_p = false;
5935
5936 while (1)
5937 {
5938 switch (TREE_CODE (exp))
5939 {
5940 case COMPONENT_REF:
5941 {
5942 tree field = TREE_OPERAND (exp, 1);
5943 packed_p = DECL_PACKED (field)
5944 || TYPE_PACKED (TREE_TYPE (field))
5945 || TYPE_PACKED (TREE_TYPE (exp));
5946 if (packed_p)
5947 goto done;
5948 }
5949 break;
5950
5951 case BIT_FIELD_REF:
5952 case ARRAY_REF:
5953 case ARRAY_RANGE_REF:
5954 case REALPART_EXPR:
5955 case IMAGPART_EXPR:
5956 case VIEW_CONVERT_EXPR:
5957 break;
5958
5959 default:
5960 goto done;
5961 }
5962 exp = TREE_OPERAND (exp, 0);
5963 }
5964 done:
5965 return packed_p;
5966 }
5967
5968 /* Return a tree of sizetype representing the size, in bytes, of the element
5969 of EXP, an ARRAY_REF. */
5970
5971 tree
5972 array_ref_element_size (tree exp)
5973 {
5974 tree aligned_size = TREE_OPERAND (exp, 3);
5975 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5976
5977 /* If a size was specified in the ARRAY_REF, it's the size measured
5978 in alignment units of the element type. So multiply by that value. */
5979 if (aligned_size)
5980 {
5981 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5982 sizetype from another type of the same width and signedness. */
5983 if (TREE_TYPE (aligned_size) != sizetype)
5984 aligned_size = fold_convert (sizetype, aligned_size);
5985 return size_binop (MULT_EXPR, aligned_size,
5986 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5987 }
5988
5989 /* Otherwise, take the size from that of the element type. Substitute
5990 any PLACEHOLDER_EXPR that we have. */
5991 else
5992 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5993 }
5994
5995 /* Return a tree representing the lower bound of the array mentioned in
5996 EXP, an ARRAY_REF. */
5997
5998 tree
5999 array_ref_low_bound (tree exp)
6000 {
6001 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6002
6003 /* If a lower bound is specified in EXP, use it. */
6004 if (TREE_OPERAND (exp, 2))
6005 return TREE_OPERAND (exp, 2);
6006
6007 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6008 substituting for a PLACEHOLDER_EXPR as needed. */
6009 if (domain_type && TYPE_MIN_VALUE (domain_type))
6010 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6011
6012 /* Otherwise, return a zero of the appropriate type. */
6013 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6014 }
6015
6016 /* Return a tree representing the upper bound of the array mentioned in
6017 EXP, an ARRAY_REF. */
6018
6019 tree
6020 array_ref_up_bound (tree exp)
6021 {
6022 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6023
6024 /* If there is a domain type and it has an upper bound, use it, substituting
6025 for a PLACEHOLDER_EXPR as needed. */
6026 if (domain_type && TYPE_MAX_VALUE (domain_type))
6027 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6028
6029 /* Otherwise fail. */
6030 return NULL_TREE;
6031 }
6032
6033 /* Return a tree representing the offset, in bytes, of the field referenced
6034 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6035
6036 tree
6037 component_ref_field_offset (tree exp)
6038 {
6039 tree aligned_offset = TREE_OPERAND (exp, 2);
6040 tree field = TREE_OPERAND (exp, 1);
6041
6042 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6043 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6044 value. */
6045 if (aligned_offset)
6046 {
6047 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6048 sizetype from another type of the same width and signedness. */
6049 if (TREE_TYPE (aligned_offset) != sizetype)
6050 aligned_offset = fold_convert (sizetype, aligned_offset);
6051 return size_binop (MULT_EXPR, aligned_offset,
6052 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6053 }
6054
6055 /* Otherwise, take the offset from that of the field. Substitute
6056 any PLACEHOLDER_EXPR that we have. */
6057 else
6058 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6059 }
6060
6061 /* Return 1 if T is an expression that get_inner_reference handles. */
6062
6063 int
6064 handled_component_p (const_tree t)
6065 {
6066 switch (TREE_CODE (t))
6067 {
6068 case BIT_FIELD_REF:
6069 case COMPONENT_REF:
6070 case ARRAY_REF:
6071 case ARRAY_RANGE_REF:
6072 case VIEW_CONVERT_EXPR:
6073 case REALPART_EXPR:
6074 case IMAGPART_EXPR:
6075 return 1;
6076
6077 default:
6078 return 0;
6079 }
6080 }
6081 \f
6082 /* Given an rtx VALUE that may contain additions and multiplications, return
6083 an equivalent value that just refers to a register, memory, or constant.
6084 This is done by generating instructions to perform the arithmetic and
6085 returning a pseudo-register containing the value.
6086
6087 The returned value may be a REG, SUBREG, MEM or constant. */
6088
6089 rtx
6090 force_operand (rtx value, rtx target)
6091 {
6092 rtx op1, op2;
6093 /* Use subtarget as the target for operand 0 of a binary operation. */
6094 rtx subtarget = get_subtarget (target);
6095 enum rtx_code code = GET_CODE (value);
6096
6097 /* Check for subreg applied to an expression produced by loop optimizer. */
6098 if (code == SUBREG
6099 && !REG_P (SUBREG_REG (value))
6100 && !MEM_P (SUBREG_REG (value)))
6101 {
6102 value
6103 = simplify_gen_subreg (GET_MODE (value),
6104 force_reg (GET_MODE (SUBREG_REG (value)),
6105 force_operand (SUBREG_REG (value),
6106 NULL_RTX)),
6107 GET_MODE (SUBREG_REG (value)),
6108 SUBREG_BYTE (value));
6109 code = GET_CODE (value);
6110 }
6111
6112 /* Check for a PIC address load. */
6113 if ((code == PLUS || code == MINUS)
6114 && XEXP (value, 0) == pic_offset_table_rtx
6115 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6116 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6117 || GET_CODE (XEXP (value, 1)) == CONST))
6118 {
6119 if (!subtarget)
6120 subtarget = gen_reg_rtx (GET_MODE (value));
6121 emit_move_insn (subtarget, value);
6122 return subtarget;
6123 }
6124
6125 if (ARITHMETIC_P (value))
6126 {
6127 op2 = XEXP (value, 1);
6128 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6129 subtarget = 0;
6130 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6131 {
6132 code = PLUS;
6133 op2 = negate_rtx (GET_MODE (value), op2);
6134 }
6135
6136 /* Check for an addition with OP2 a constant integer and our first
6137 operand a PLUS of a virtual register and something else. In that
6138 case, we want to emit the sum of the virtual register and the
6139 constant first and then add the other value. This allows virtual
6140 register instantiation to simply modify the constant rather than
6141 creating another one around this addition. */
6142 if (code == PLUS && GET_CODE (op2) == CONST_INT
6143 && GET_CODE (XEXP (value, 0)) == PLUS
6144 && REG_P (XEXP (XEXP (value, 0), 0))
6145 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6146 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6147 {
6148 rtx temp = expand_simple_binop (GET_MODE (value), code,
6149 XEXP (XEXP (value, 0), 0), op2,
6150 subtarget, 0, OPTAB_LIB_WIDEN);
6151 return expand_simple_binop (GET_MODE (value), code, temp,
6152 force_operand (XEXP (XEXP (value,
6153 0), 1), 0),
6154 target, 0, OPTAB_LIB_WIDEN);
6155 }
6156
6157 op1 = force_operand (XEXP (value, 0), subtarget);
6158 op2 = force_operand (op2, NULL_RTX);
6159 switch (code)
6160 {
6161 case MULT:
6162 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6163 case DIV:
6164 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6165 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6166 target, 1, OPTAB_LIB_WIDEN);
6167 else
6168 return expand_divmod (0,
6169 FLOAT_MODE_P (GET_MODE (value))
6170 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6171 GET_MODE (value), op1, op2, target, 0);
6172 case MOD:
6173 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6174 target, 0);
6175 case UDIV:
6176 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6177 target, 1);
6178 case UMOD:
6179 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6180 target, 1);
6181 case ASHIFTRT:
6182 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6183 target, 0, OPTAB_LIB_WIDEN);
6184 default:
6185 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6186 target, 1, OPTAB_LIB_WIDEN);
6187 }
6188 }
6189 if (UNARY_P (value))
6190 {
6191 if (!target)
6192 target = gen_reg_rtx (GET_MODE (value));
6193 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6194 switch (code)
6195 {
6196 case ZERO_EXTEND:
6197 case SIGN_EXTEND:
6198 case TRUNCATE:
6199 case FLOAT_EXTEND:
6200 case FLOAT_TRUNCATE:
6201 convert_move (target, op1, code == ZERO_EXTEND);
6202 return target;
6203
6204 case FIX:
6205 case UNSIGNED_FIX:
6206 expand_fix (target, op1, code == UNSIGNED_FIX);
6207 return target;
6208
6209 case FLOAT:
6210 case UNSIGNED_FLOAT:
6211 expand_float (target, op1, code == UNSIGNED_FLOAT);
6212 return target;
6213
6214 default:
6215 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6216 }
6217 }
6218
6219 #ifdef INSN_SCHEDULING
6220 /* On machines that have insn scheduling, we want all memory reference to be
6221 explicit, so we need to deal with such paradoxical SUBREGs. */
6222 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6223 && (GET_MODE_SIZE (GET_MODE (value))
6224 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6225 value
6226 = simplify_gen_subreg (GET_MODE (value),
6227 force_reg (GET_MODE (SUBREG_REG (value)),
6228 force_operand (SUBREG_REG (value),
6229 NULL_RTX)),
6230 GET_MODE (SUBREG_REG (value)),
6231 SUBREG_BYTE (value));
6232 #endif
6233
6234 return value;
6235 }
6236 \f
6237 /* Subroutine of expand_expr: return nonzero iff there is no way that
6238 EXP can reference X, which is being modified. TOP_P is nonzero if this
6239 call is going to be used to determine whether we need a temporary
6240 for EXP, as opposed to a recursive call to this function.
6241
6242 It is always safe for this routine to return zero since it merely
6243 searches for optimization opportunities. */
6244
6245 int
6246 safe_from_p (rtx x, tree exp, int top_p)
6247 {
6248 rtx exp_rtl = 0;
6249 int i, nops;
6250
6251 if (x == 0
6252 /* If EXP has varying size, we MUST use a target since we currently
6253 have no way of allocating temporaries of variable size
6254 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6255 So we assume here that something at a higher level has prevented a
6256 clash. This is somewhat bogus, but the best we can do. Only
6257 do this when X is BLKmode and when we are at the top level. */
6258 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6260 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6261 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6262 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6263 != INTEGER_CST)
6264 && GET_MODE (x) == BLKmode)
6265 /* If X is in the outgoing argument area, it is always safe. */
6266 || (MEM_P (x)
6267 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6268 || (GET_CODE (XEXP (x, 0)) == PLUS
6269 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6270 return 1;
6271
6272 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6273 find the underlying pseudo. */
6274 if (GET_CODE (x) == SUBREG)
6275 {
6276 x = SUBREG_REG (x);
6277 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6278 return 0;
6279 }
6280
6281 /* Now look at our tree code and possibly recurse. */
6282 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6283 {
6284 case tcc_declaration:
6285 exp_rtl = DECL_RTL_IF_SET (exp);
6286 break;
6287
6288 case tcc_constant:
6289 return 1;
6290
6291 case tcc_exceptional:
6292 if (TREE_CODE (exp) == TREE_LIST)
6293 {
6294 while (1)
6295 {
6296 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6297 return 0;
6298 exp = TREE_CHAIN (exp);
6299 if (!exp)
6300 return 1;
6301 if (TREE_CODE (exp) != TREE_LIST)
6302 return safe_from_p (x, exp, 0);
6303 }
6304 }
6305 else if (TREE_CODE (exp) == CONSTRUCTOR)
6306 {
6307 constructor_elt *ce;
6308 unsigned HOST_WIDE_INT idx;
6309
6310 for (idx = 0;
6311 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6312 idx++)
6313 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6314 || !safe_from_p (x, ce->value, 0))
6315 return 0;
6316 return 1;
6317 }
6318 else if (TREE_CODE (exp) == ERROR_MARK)
6319 return 1; /* An already-visited SAVE_EXPR? */
6320 else
6321 return 0;
6322
6323 case tcc_statement:
6324 /* The only case we look at here is the DECL_INITIAL inside a
6325 DECL_EXPR. */
6326 return (TREE_CODE (exp) != DECL_EXPR
6327 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6328 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6329 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6330
6331 case tcc_binary:
6332 case tcc_comparison:
6333 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6334 return 0;
6335 /* Fall through. */
6336
6337 case tcc_unary:
6338 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6339
6340 case tcc_expression:
6341 case tcc_reference:
6342 case tcc_vl_exp:
6343 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6344 the expression. If it is set, we conflict iff we are that rtx or
6345 both are in memory. Otherwise, we check all operands of the
6346 expression recursively. */
6347
6348 switch (TREE_CODE (exp))
6349 {
6350 case ADDR_EXPR:
6351 /* If the operand is static or we are static, we can't conflict.
6352 Likewise if we don't conflict with the operand at all. */
6353 if (staticp (TREE_OPERAND (exp, 0))
6354 || TREE_STATIC (exp)
6355 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6356 return 1;
6357
6358 /* Otherwise, the only way this can conflict is if we are taking
6359 the address of a DECL a that address if part of X, which is
6360 very rare. */
6361 exp = TREE_OPERAND (exp, 0);
6362 if (DECL_P (exp))
6363 {
6364 if (!DECL_RTL_SET_P (exp)
6365 || !MEM_P (DECL_RTL (exp)))
6366 return 0;
6367 else
6368 exp_rtl = XEXP (DECL_RTL (exp), 0);
6369 }
6370 break;
6371
6372 case MISALIGNED_INDIRECT_REF:
6373 case ALIGN_INDIRECT_REF:
6374 case INDIRECT_REF:
6375 if (MEM_P (x)
6376 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6377 get_alias_set (exp)))
6378 return 0;
6379 break;
6380
6381 case CALL_EXPR:
6382 /* Assume that the call will clobber all hard registers and
6383 all of memory. */
6384 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6385 || MEM_P (x))
6386 return 0;
6387 break;
6388
6389 case WITH_CLEANUP_EXPR:
6390 case CLEANUP_POINT_EXPR:
6391 /* Lowered by gimplify.c. */
6392 gcc_unreachable ();
6393
6394 case SAVE_EXPR:
6395 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6396
6397 default:
6398 break;
6399 }
6400
6401 /* If we have an rtx, we do not need to scan our operands. */
6402 if (exp_rtl)
6403 break;
6404
6405 nops = TREE_OPERAND_LENGTH (exp);
6406 for (i = 0; i < nops; i++)
6407 if (TREE_OPERAND (exp, i) != 0
6408 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6409 return 0;
6410
6411 break;
6412
6413 case tcc_type:
6414 /* Should never get a type here. */
6415 gcc_unreachable ();
6416
6417 case tcc_gimple_stmt:
6418 gcc_unreachable ();
6419 }
6420
6421 /* If we have an rtl, find any enclosed object. Then see if we conflict
6422 with it. */
6423 if (exp_rtl)
6424 {
6425 if (GET_CODE (exp_rtl) == SUBREG)
6426 {
6427 exp_rtl = SUBREG_REG (exp_rtl);
6428 if (REG_P (exp_rtl)
6429 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6430 return 0;
6431 }
6432
6433 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6434 are memory and they conflict. */
6435 return ! (rtx_equal_p (x, exp_rtl)
6436 || (MEM_P (x) && MEM_P (exp_rtl)
6437 && true_dependence (exp_rtl, VOIDmode, x,
6438 rtx_addr_varies_p)));
6439 }
6440
6441 /* If we reach here, it is safe. */
6442 return 1;
6443 }
6444
6445 \f
6446 /* Return the highest power of two that EXP is known to be a multiple of.
6447 This is used in updating alignment of MEMs in array references. */
6448
6449 unsigned HOST_WIDE_INT
6450 highest_pow2_factor (const_tree exp)
6451 {
6452 unsigned HOST_WIDE_INT c0, c1;
6453
6454 switch (TREE_CODE (exp))
6455 {
6456 case INTEGER_CST:
6457 /* We can find the lowest bit that's a one. If the low
6458 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6459 We need to handle this case since we can find it in a COND_EXPR,
6460 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6461 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6462 later ICE. */
6463 if (TREE_OVERFLOW (exp))
6464 return BIGGEST_ALIGNMENT;
6465 else
6466 {
6467 /* Note: tree_low_cst is intentionally not used here,
6468 we don't care about the upper bits. */
6469 c0 = TREE_INT_CST_LOW (exp);
6470 c0 &= -c0;
6471 return c0 ? c0 : BIGGEST_ALIGNMENT;
6472 }
6473 break;
6474
6475 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6476 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6477 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6478 return MIN (c0, c1);
6479
6480 case MULT_EXPR:
6481 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6482 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6483 return c0 * c1;
6484
6485 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6486 case CEIL_DIV_EXPR:
6487 if (integer_pow2p (TREE_OPERAND (exp, 1))
6488 && host_integerp (TREE_OPERAND (exp, 1), 1))
6489 {
6490 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6491 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6492 return MAX (1, c0 / c1);
6493 }
6494 break;
6495
6496 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6497 case SAVE_EXPR:
6498 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6499
6500 case COMPOUND_EXPR:
6501 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6502
6503 case COND_EXPR:
6504 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6505 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6506 return MIN (c0, c1);
6507
6508 default:
6509 break;
6510 }
6511
6512 return 1;
6513 }
6514
6515 /* Similar, except that the alignment requirements of TARGET are
6516 taken into account. Assume it is at least as aligned as its
6517 type, unless it is a COMPONENT_REF in which case the layout of
6518 the structure gives the alignment. */
6519
6520 static unsigned HOST_WIDE_INT
6521 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6522 {
6523 unsigned HOST_WIDE_INT target_align, factor;
6524
6525 factor = highest_pow2_factor (exp);
6526 if (TREE_CODE (target) == COMPONENT_REF)
6527 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6528 else
6529 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6530 return MAX (factor, target_align);
6531 }
6532 \f
6533 /* Return &VAR expression for emulated thread local VAR. */
6534
6535 static tree
6536 emutls_var_address (tree var)
6537 {
6538 tree emuvar = emutls_decl (var);
6539 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6540 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6541 tree arglist = build_tree_list (NULL_TREE, arg);
6542 tree call = build_function_call_expr (fn, arglist);
6543 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6544 }
6545 \f
6546 /* Expands variable VAR. */
6547
6548 void
6549 expand_var (tree var)
6550 {
6551 if (DECL_EXTERNAL (var))
6552 return;
6553
6554 if (TREE_STATIC (var))
6555 /* If this is an inlined copy of a static local variable,
6556 look up the original decl. */
6557 var = DECL_ORIGIN (var);
6558
6559 if (TREE_STATIC (var)
6560 ? !TREE_ASM_WRITTEN (var)
6561 : !DECL_RTL_SET_P (var))
6562 {
6563 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6564 /* Should be ignored. */;
6565 else if (lang_hooks.expand_decl (var))
6566 /* OK. */;
6567 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6568 expand_decl (var);
6569 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6570 rest_of_decl_compilation (var, 0, 0);
6571 else
6572 /* No expansion needed. */
6573 gcc_assert (TREE_CODE (var) == TYPE_DECL
6574 || TREE_CODE (var) == CONST_DECL
6575 || TREE_CODE (var) == FUNCTION_DECL
6576 || TREE_CODE (var) == LABEL_DECL);
6577 }
6578 }
6579
6580 /* Subroutine of expand_expr. Expand the two operands of a binary
6581 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6582 The value may be stored in TARGET if TARGET is nonzero. The
6583 MODIFIER argument is as documented by expand_expr. */
6584
6585 static void
6586 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6587 enum expand_modifier modifier)
6588 {
6589 if (! safe_from_p (target, exp1, 1))
6590 target = 0;
6591 if (operand_equal_p (exp0, exp1, 0))
6592 {
6593 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6594 *op1 = copy_rtx (*op0);
6595 }
6596 else
6597 {
6598 /* If we need to preserve evaluation order, copy exp0 into its own
6599 temporary variable so that it can't be clobbered by exp1. */
6600 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6601 exp0 = save_expr (exp0);
6602 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6603 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6604 }
6605 }
6606
6607 \f
6608 /* Return a MEM that contains constant EXP. DEFER is as for
6609 output_constant_def and MODIFIER is as for expand_expr. */
6610
6611 static rtx
6612 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6613 {
6614 rtx mem;
6615
6616 mem = output_constant_def (exp, defer);
6617 if (modifier != EXPAND_INITIALIZER)
6618 mem = use_anchored_address (mem);
6619 return mem;
6620 }
6621
6622 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6623 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6624
6625 static rtx
6626 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6627 enum expand_modifier modifier)
6628 {
6629 rtx result, subtarget;
6630 tree inner, offset;
6631 HOST_WIDE_INT bitsize, bitpos;
6632 int volatilep, unsignedp;
6633 enum machine_mode mode1;
6634
6635 /* If we are taking the address of a constant and are at the top level,
6636 we have to use output_constant_def since we can't call force_const_mem
6637 at top level. */
6638 /* ??? This should be considered a front-end bug. We should not be
6639 generating ADDR_EXPR of something that isn't an LVALUE. The only
6640 exception here is STRING_CST. */
6641 if (TREE_CODE (exp) == CONSTRUCTOR
6642 || CONSTANT_CLASS_P (exp))
6643 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6644
6645 /* Everything must be something allowed by is_gimple_addressable. */
6646 switch (TREE_CODE (exp))
6647 {
6648 case INDIRECT_REF:
6649 /* This case will happen via recursion for &a->b. */
6650 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6651
6652 case CONST_DECL:
6653 /* Recurse and make the output_constant_def clause above handle this. */
6654 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6655 tmode, modifier);
6656
6657 case REALPART_EXPR:
6658 /* The real part of the complex number is always first, therefore
6659 the address is the same as the address of the parent object. */
6660 offset = 0;
6661 bitpos = 0;
6662 inner = TREE_OPERAND (exp, 0);
6663 break;
6664
6665 case IMAGPART_EXPR:
6666 /* The imaginary part of the complex number is always second.
6667 The expression is therefore always offset by the size of the
6668 scalar type. */
6669 offset = 0;
6670 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6671 inner = TREE_OPERAND (exp, 0);
6672 break;
6673
6674 case VAR_DECL:
6675 /* TLS emulation hook - replace __thread VAR's &VAR with
6676 __emutls_get_address (&_emutls.VAR). */
6677 if (! targetm.have_tls
6678 && TREE_CODE (exp) == VAR_DECL
6679 && DECL_THREAD_LOCAL_P (exp))
6680 {
6681 exp = emutls_var_address (exp);
6682 return expand_expr (exp, target, tmode, modifier);
6683 }
6684 /* Fall through. */
6685
6686 default:
6687 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6688 expand_expr, as that can have various side effects; LABEL_DECLs for
6689 example, may not have their DECL_RTL set yet. Assume language
6690 specific tree nodes can be expanded in some interesting way. */
6691 if (DECL_P (exp)
6692 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6693 {
6694 result = expand_expr (exp, target, tmode,
6695 modifier == EXPAND_INITIALIZER
6696 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6697
6698 /* If the DECL isn't in memory, then the DECL wasn't properly
6699 marked TREE_ADDRESSABLE, which will be either a front-end
6700 or a tree optimizer bug. */
6701 gcc_assert (MEM_P (result));
6702 result = XEXP (result, 0);
6703
6704 /* ??? Is this needed anymore? */
6705 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6706 {
6707 assemble_external (exp);
6708 TREE_USED (exp) = 1;
6709 }
6710
6711 if (modifier != EXPAND_INITIALIZER
6712 && modifier != EXPAND_CONST_ADDRESS)
6713 result = force_operand (result, target);
6714 return result;
6715 }
6716
6717 /* Pass FALSE as the last argument to get_inner_reference although
6718 we are expanding to RTL. The rationale is that we know how to
6719 handle "aligning nodes" here: we can just bypass them because
6720 they won't change the final object whose address will be returned
6721 (they actually exist only for that purpose). */
6722 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6723 &mode1, &unsignedp, &volatilep, false);
6724 break;
6725 }
6726
6727 /* We must have made progress. */
6728 gcc_assert (inner != exp);
6729
6730 subtarget = offset || bitpos ? NULL_RTX : target;
6731 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6732
6733 if (offset)
6734 {
6735 rtx tmp;
6736
6737 if (modifier != EXPAND_NORMAL)
6738 result = force_operand (result, NULL);
6739 tmp = expand_expr (offset, NULL_RTX, tmode,
6740 modifier == EXPAND_INITIALIZER
6741 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6742
6743 result = convert_memory_address (tmode, result);
6744 tmp = convert_memory_address (tmode, tmp);
6745
6746 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6747 result = gen_rtx_PLUS (tmode, result, tmp);
6748 else
6749 {
6750 subtarget = bitpos ? NULL_RTX : target;
6751 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6752 1, OPTAB_LIB_WIDEN);
6753 }
6754 }
6755
6756 if (bitpos)
6757 {
6758 /* Someone beforehand should have rejected taking the address
6759 of such an object. */
6760 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6761
6762 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6763 if (modifier < EXPAND_SUM)
6764 result = force_operand (result, target);
6765 }
6766
6767 return result;
6768 }
6769
6770 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6771 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6772
6773 static rtx
6774 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6775 enum expand_modifier modifier)
6776 {
6777 enum machine_mode rmode;
6778 rtx result;
6779
6780 /* Target mode of VOIDmode says "whatever's natural". */
6781 if (tmode == VOIDmode)
6782 tmode = TYPE_MODE (TREE_TYPE (exp));
6783
6784 /* We can get called with some Weird Things if the user does silliness
6785 like "(short) &a". In that case, convert_memory_address won't do
6786 the right thing, so ignore the given target mode. */
6787 if (tmode != Pmode && tmode != ptr_mode)
6788 tmode = Pmode;
6789
6790 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6791 tmode, modifier);
6792
6793 /* Despite expand_expr claims concerning ignoring TMODE when not
6794 strictly convenient, stuff breaks if we don't honor it. Note
6795 that combined with the above, we only do this for pointer modes. */
6796 rmode = GET_MODE (result);
6797 if (rmode == VOIDmode)
6798 rmode = tmode;
6799 if (rmode != tmode)
6800 result = convert_memory_address (tmode, result);
6801
6802 return result;
6803 }
6804
6805
6806 /* expand_expr: generate code for computing expression EXP.
6807 An rtx for the computed value is returned. The value is never null.
6808 In the case of a void EXP, const0_rtx is returned.
6809
6810 The value may be stored in TARGET if TARGET is nonzero.
6811 TARGET is just a suggestion; callers must assume that
6812 the rtx returned may not be the same as TARGET.
6813
6814 If TARGET is CONST0_RTX, it means that the value will be ignored.
6815
6816 If TMODE is not VOIDmode, it suggests generating the
6817 result in mode TMODE. But this is done only when convenient.
6818 Otherwise, TMODE is ignored and the value generated in its natural mode.
6819 TMODE is just a suggestion; callers must assume that
6820 the rtx returned may not have mode TMODE.
6821
6822 Note that TARGET may have neither TMODE nor MODE. In that case, it
6823 probably will not be used.
6824
6825 If MODIFIER is EXPAND_SUM then when EXP is an addition
6826 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6827 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6828 products as above, or REG or MEM, or constant.
6829 Ordinarily in such cases we would output mul or add instructions
6830 and then return a pseudo reg containing the sum.
6831
6832 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6833 it also marks a label as absolutely required (it can't be dead).
6834 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6835 This is used for outputting expressions used in initializers.
6836
6837 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6838 with a constant address even if that address is not normally legitimate.
6839 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6840
6841 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6842 a call parameter. Such targets require special care as we haven't yet
6843 marked TARGET so that it's safe from being trashed by libcalls. We
6844 don't want to use TARGET for anything but the final result;
6845 Intermediate values must go elsewhere. Additionally, calls to
6846 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6847
6848 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6849 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6850 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6851 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6852 recursively. */
6853
6854 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6855 enum expand_modifier, rtx *);
6856
6857 rtx
6858 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6859 enum expand_modifier modifier, rtx *alt_rtl)
6860 {
6861 int rn = -1;
6862 rtx ret, last = NULL;
6863
6864 /* Handle ERROR_MARK before anybody tries to access its type. */
6865 if (TREE_CODE (exp) == ERROR_MARK
6866 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6867 {
6868 ret = CONST0_RTX (tmode);
6869 return ret ? ret : const0_rtx;
6870 }
6871
6872 if (flag_non_call_exceptions)
6873 {
6874 rn = lookup_stmt_eh_region (exp);
6875 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6876 if (rn >= 0)
6877 last = get_last_insn ();
6878 }
6879
6880 /* If this is an expression of some kind and it has an associated line
6881 number, then emit the line number before expanding the expression.
6882
6883 We need to save and restore the file and line information so that
6884 errors discovered during expansion are emitted with the right
6885 information. It would be better of the diagnostic routines
6886 used the file/line information embedded in the tree nodes rather
6887 than globals. */
6888 if (cfun && EXPR_HAS_LOCATION (exp))
6889 {
6890 location_t saved_location = input_location;
6891 input_location = EXPR_LOCATION (exp);
6892 set_curr_insn_source_location (input_location);
6893
6894 /* Record where the insns produced belong. */
6895 set_curr_insn_block (TREE_BLOCK (exp));
6896
6897 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6898
6899 input_location = saved_location;
6900 }
6901 else
6902 {
6903 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6904 }
6905
6906 /* If using non-call exceptions, mark all insns that may trap.
6907 expand_call() will mark CALL_INSNs before we get to this code,
6908 but it doesn't handle libcalls, and these may trap. */
6909 if (rn >= 0)
6910 {
6911 rtx insn;
6912 for (insn = next_real_insn (last); insn;
6913 insn = next_real_insn (insn))
6914 {
6915 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6916 /* If we want exceptions for non-call insns, any
6917 may_trap_p instruction may throw. */
6918 && GET_CODE (PATTERN (insn)) != CLOBBER
6919 && GET_CODE (PATTERN (insn)) != USE
6920 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6921 {
6922 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6923 REG_NOTES (insn));
6924 }
6925 }
6926 }
6927
6928 return ret;
6929 }
6930
6931 static rtx
6932 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6933 enum expand_modifier modifier, rtx *alt_rtl)
6934 {
6935 rtx op0, op1, op2, temp, decl_rtl;
6936 tree type;
6937 int unsignedp;
6938 enum machine_mode mode;
6939 enum tree_code code = TREE_CODE (exp);
6940 optab this_optab;
6941 rtx subtarget, original_target;
6942 int ignore;
6943 tree context, subexp0, subexp1;
6944 bool reduce_bit_field = false;
6945 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6946 ? reduce_to_bit_field_precision ((expr), \
6947 target, \
6948 type) \
6949 : (expr))
6950
6951 if (GIMPLE_STMT_P (exp))
6952 {
6953 type = void_type_node;
6954 mode = VOIDmode;
6955 unsignedp = 0;
6956 }
6957 else
6958 {
6959 type = TREE_TYPE (exp);
6960 mode = TYPE_MODE (type);
6961 unsignedp = TYPE_UNSIGNED (type);
6962 }
6963 if (lang_hooks.reduce_bit_field_operations
6964 && TREE_CODE (type) == INTEGER_TYPE
6965 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6966 {
6967 /* An operation in what may be a bit-field type needs the
6968 result to be reduced to the precision of the bit-field type,
6969 which is narrower than that of the type's mode. */
6970 reduce_bit_field = true;
6971 if (modifier == EXPAND_STACK_PARM)
6972 target = 0;
6973 }
6974
6975 /* Use subtarget as the target for operand 0 of a binary operation. */
6976 subtarget = get_subtarget (target);
6977 original_target = target;
6978 ignore = (target == const0_rtx
6979 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6980 || code == CONVERT_EXPR || code == COND_EXPR
6981 || code == VIEW_CONVERT_EXPR)
6982 && TREE_CODE (type) == VOID_TYPE));
6983
6984 /* If we are going to ignore this result, we need only do something
6985 if there is a side-effect somewhere in the expression. If there
6986 is, short-circuit the most common cases here. Note that we must
6987 not call expand_expr with anything but const0_rtx in case this
6988 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6989
6990 if (ignore)
6991 {
6992 if (! TREE_SIDE_EFFECTS (exp))
6993 return const0_rtx;
6994
6995 /* Ensure we reference a volatile object even if value is ignored, but
6996 don't do this if all we are doing is taking its address. */
6997 if (TREE_THIS_VOLATILE (exp)
6998 && TREE_CODE (exp) != FUNCTION_DECL
6999 && mode != VOIDmode && mode != BLKmode
7000 && modifier != EXPAND_CONST_ADDRESS)
7001 {
7002 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7003 if (MEM_P (temp))
7004 temp = copy_to_reg (temp);
7005 return const0_rtx;
7006 }
7007
7008 if (TREE_CODE_CLASS (code) == tcc_unary
7009 || code == COMPONENT_REF || code == INDIRECT_REF)
7010 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7011 modifier);
7012
7013 else if (TREE_CODE_CLASS (code) == tcc_binary
7014 || TREE_CODE_CLASS (code) == tcc_comparison
7015 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7016 {
7017 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7018 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7019 return const0_rtx;
7020 }
7021 else if (code == BIT_FIELD_REF)
7022 {
7023 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7024 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7025 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7026 return const0_rtx;
7027 }
7028
7029 target = 0;
7030 }
7031
7032
7033 switch (code)
7034 {
7035 case LABEL_DECL:
7036 {
7037 tree function = decl_function_context (exp);
7038
7039 temp = label_rtx (exp);
7040 temp = gen_rtx_LABEL_REF (Pmode, temp);
7041
7042 if (function != current_function_decl
7043 && function != 0)
7044 LABEL_REF_NONLOCAL_P (temp) = 1;
7045
7046 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7047 return temp;
7048 }
7049
7050 case SSA_NAME:
7051 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7052 NULL);
7053
7054 case PARM_DECL:
7055 case VAR_DECL:
7056 /* If a static var's type was incomplete when the decl was written,
7057 but the type is complete now, lay out the decl now. */
7058 if (DECL_SIZE (exp) == 0
7059 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7060 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7061 layout_decl (exp, 0);
7062
7063 /* TLS emulation hook - replace __thread vars with
7064 *__emutls_get_address (&_emutls.var). */
7065 if (! targetm.have_tls
7066 && TREE_CODE (exp) == VAR_DECL
7067 && DECL_THREAD_LOCAL_P (exp))
7068 {
7069 exp = build_fold_indirect_ref (emutls_var_address (exp));
7070 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7071 }
7072
7073 /* ... fall through ... */
7074
7075 case FUNCTION_DECL:
7076 case RESULT_DECL:
7077 decl_rtl = DECL_RTL (exp);
7078 gcc_assert (decl_rtl);
7079 decl_rtl = copy_rtx (decl_rtl);
7080
7081 /* Ensure variable marked as used even if it doesn't go through
7082 a parser. If it hasn't be used yet, write out an external
7083 definition. */
7084 if (! TREE_USED (exp))
7085 {
7086 assemble_external (exp);
7087 TREE_USED (exp) = 1;
7088 }
7089
7090 /* Show we haven't gotten RTL for this yet. */
7091 temp = 0;
7092
7093 /* Variables inherited from containing functions should have
7094 been lowered by this point. */
7095 context = decl_function_context (exp);
7096 gcc_assert (!context
7097 || context == current_function_decl
7098 || TREE_STATIC (exp)
7099 /* ??? C++ creates functions that are not TREE_STATIC. */
7100 || TREE_CODE (exp) == FUNCTION_DECL);
7101
7102 /* This is the case of an array whose size is to be determined
7103 from its initializer, while the initializer is still being parsed.
7104 See expand_decl. */
7105
7106 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7107 temp = validize_mem (decl_rtl);
7108
7109 /* If DECL_RTL is memory, we are in the normal case and either
7110 the address is not valid or it is not a register and -fforce-addr
7111 is specified, get the address into a register. */
7112
7113 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7114 {
7115 if (alt_rtl)
7116 *alt_rtl = decl_rtl;
7117 decl_rtl = use_anchored_address (decl_rtl);
7118 if (modifier != EXPAND_CONST_ADDRESS
7119 && modifier != EXPAND_SUM
7120 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7121 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7122 temp = replace_equiv_address (decl_rtl,
7123 copy_rtx (XEXP (decl_rtl, 0)));
7124 }
7125
7126 /* If we got something, return it. But first, set the alignment
7127 if the address is a register. */
7128 if (temp != 0)
7129 {
7130 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7131 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7132
7133 return temp;
7134 }
7135
7136 /* If the mode of DECL_RTL does not match that of the decl, it
7137 must be a promoted value. We return a SUBREG of the wanted mode,
7138 but mark it so that we know that it was already extended. */
7139
7140 if (REG_P (decl_rtl)
7141 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7142 {
7143 enum machine_mode pmode;
7144
7145 /* Get the signedness used for this variable. Ensure we get the
7146 same mode we got when the variable was declared. */
7147 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7148 (TREE_CODE (exp) == RESULT_DECL
7149 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7150 gcc_assert (GET_MODE (decl_rtl) == pmode);
7151
7152 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7153 SUBREG_PROMOTED_VAR_P (temp) = 1;
7154 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7155 return temp;
7156 }
7157
7158 return decl_rtl;
7159
7160 case INTEGER_CST:
7161 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7162 TREE_INT_CST_HIGH (exp), mode);
7163
7164 return temp;
7165
7166 case VECTOR_CST:
7167 {
7168 tree tmp = NULL_TREE;
7169 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7170 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7171 return const_vector_from_tree (exp);
7172 if (GET_MODE_CLASS (mode) == MODE_INT)
7173 {
7174 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7175 if (type_for_mode)
7176 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7177 }
7178 if (!tmp)
7179 tmp = build_constructor_from_list (type,
7180 TREE_VECTOR_CST_ELTS (exp));
7181 return expand_expr (tmp, ignore ? const0_rtx : target,
7182 tmode, modifier);
7183 }
7184
7185 case CONST_DECL:
7186 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7187
7188 case REAL_CST:
7189 /* If optimized, generate immediate CONST_DOUBLE
7190 which will be turned into memory by reload if necessary.
7191
7192 We used to force a register so that loop.c could see it. But
7193 this does not allow gen_* patterns to perform optimizations with
7194 the constants. It also produces two insns in cases like "x = 1.0;".
7195 On most machines, floating-point constants are not permitted in
7196 many insns, so we'd end up copying it to a register in any case.
7197
7198 Now, we do the copying in expand_binop, if appropriate. */
7199 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7200 TYPE_MODE (TREE_TYPE (exp)));
7201
7202 case COMPLEX_CST:
7203 /* Handle evaluating a complex constant in a CONCAT target. */
7204 if (original_target && GET_CODE (original_target) == CONCAT)
7205 {
7206 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7207 rtx rtarg, itarg;
7208
7209 rtarg = XEXP (original_target, 0);
7210 itarg = XEXP (original_target, 1);
7211
7212 /* Move the real and imaginary parts separately. */
7213 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7214 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7215
7216 if (op0 != rtarg)
7217 emit_move_insn (rtarg, op0);
7218 if (op1 != itarg)
7219 emit_move_insn (itarg, op1);
7220
7221 return original_target;
7222 }
7223
7224 /* ... fall through ... */
7225
7226 case STRING_CST:
7227 temp = expand_expr_constant (exp, 1, modifier);
7228
7229 /* temp contains a constant address.
7230 On RISC machines where a constant address isn't valid,
7231 make some insns to get that address into a register. */
7232 if (modifier != EXPAND_CONST_ADDRESS
7233 && modifier != EXPAND_INITIALIZER
7234 && modifier != EXPAND_SUM
7235 && (! memory_address_p (mode, XEXP (temp, 0))
7236 || flag_force_addr))
7237 return replace_equiv_address (temp,
7238 copy_rtx (XEXP (temp, 0)));
7239 return temp;
7240
7241 case SAVE_EXPR:
7242 {
7243 tree val = TREE_OPERAND (exp, 0);
7244 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7245
7246 if (!SAVE_EXPR_RESOLVED_P (exp))
7247 {
7248 /* We can indeed still hit this case, typically via builtin
7249 expanders calling save_expr immediately before expanding
7250 something. Assume this means that we only have to deal
7251 with non-BLKmode values. */
7252 gcc_assert (GET_MODE (ret) != BLKmode);
7253
7254 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7255 DECL_ARTIFICIAL (val) = 1;
7256 DECL_IGNORED_P (val) = 1;
7257 TREE_OPERAND (exp, 0) = val;
7258 SAVE_EXPR_RESOLVED_P (exp) = 1;
7259
7260 if (!CONSTANT_P (ret))
7261 ret = copy_to_reg (ret);
7262 SET_DECL_RTL (val, ret);
7263 }
7264
7265 return ret;
7266 }
7267
7268 case GOTO_EXPR:
7269 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7270 expand_goto (TREE_OPERAND (exp, 0));
7271 else
7272 expand_computed_goto (TREE_OPERAND (exp, 0));
7273 return const0_rtx;
7274
7275 case CONSTRUCTOR:
7276 /* If we don't need the result, just ensure we evaluate any
7277 subexpressions. */
7278 if (ignore)
7279 {
7280 unsigned HOST_WIDE_INT idx;
7281 tree value;
7282
7283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7284 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7285
7286 return const0_rtx;
7287 }
7288
7289 /* Try to avoid creating a temporary at all. This is possible
7290 if all of the initializer is zero.
7291 FIXME: try to handle all [0..255] initializers we can handle
7292 with memset. */
7293 else if (TREE_STATIC (exp)
7294 && !TREE_ADDRESSABLE (exp)
7295 && target != 0 && mode == BLKmode
7296 && all_zeros_p (exp))
7297 {
7298 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7299 return target;
7300 }
7301
7302 /* All elts simple constants => refer to a constant in memory. But
7303 if this is a non-BLKmode mode, let it store a field at a time
7304 since that should make a CONST_INT or CONST_DOUBLE when we
7305 fold. Likewise, if we have a target we can use, it is best to
7306 store directly into the target unless the type is large enough
7307 that memcpy will be used. If we are making an initializer and
7308 all operands are constant, put it in memory as well.
7309
7310 FIXME: Avoid trying to fill vector constructors piece-meal.
7311 Output them with output_constant_def below unless we're sure
7312 they're zeros. This should go away when vector initializers
7313 are treated like VECTOR_CST instead of arrays.
7314 */
7315 else if ((TREE_STATIC (exp)
7316 && ((mode == BLKmode
7317 && ! (target != 0 && safe_from_p (target, exp, 1)))
7318 || TREE_ADDRESSABLE (exp)
7319 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7320 && (! MOVE_BY_PIECES_P
7321 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7322 TYPE_ALIGN (type)))
7323 && ! mostly_zeros_p (exp))))
7324 || ((modifier == EXPAND_INITIALIZER
7325 || modifier == EXPAND_CONST_ADDRESS)
7326 && TREE_CONSTANT (exp)))
7327 {
7328 rtx constructor = expand_expr_constant (exp, 1, modifier);
7329
7330 if (modifier != EXPAND_CONST_ADDRESS
7331 && modifier != EXPAND_INITIALIZER
7332 && modifier != EXPAND_SUM)
7333 constructor = validize_mem (constructor);
7334
7335 return constructor;
7336 }
7337 else
7338 {
7339 /* Handle calls that pass values in multiple non-contiguous
7340 locations. The Irix 6 ABI has examples of this. */
7341 if (target == 0 || ! safe_from_p (target, exp, 1)
7342 || GET_CODE (target) == PARALLEL
7343 || modifier == EXPAND_STACK_PARM)
7344 target
7345 = assign_temp (build_qualified_type (type,
7346 (TYPE_QUALS (type)
7347 | (TREE_READONLY (exp)
7348 * TYPE_QUAL_CONST))),
7349 0, TREE_ADDRESSABLE (exp), 1);
7350
7351 store_constructor (exp, target, 0, int_expr_size (exp));
7352 return target;
7353 }
7354
7355 case MISALIGNED_INDIRECT_REF:
7356 case ALIGN_INDIRECT_REF:
7357 case INDIRECT_REF:
7358 {
7359 tree exp1 = TREE_OPERAND (exp, 0);
7360
7361 if (modifier != EXPAND_WRITE)
7362 {
7363 tree t;
7364
7365 t = fold_read_from_constant_string (exp);
7366 if (t)
7367 return expand_expr (t, target, tmode, modifier);
7368 }
7369
7370 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7371 op0 = memory_address (mode, op0);
7372
7373 if (code == ALIGN_INDIRECT_REF)
7374 {
7375 int align = TYPE_ALIGN_UNIT (type);
7376 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7377 op0 = memory_address (mode, op0);
7378 }
7379
7380 temp = gen_rtx_MEM (mode, op0);
7381
7382 set_mem_attributes (temp, exp, 0);
7383
7384 /* Resolve the misalignment now, so that we don't have to remember
7385 to resolve it later. Of course, this only works for reads. */
7386 /* ??? When we get around to supporting writes, we'll have to handle
7387 this in store_expr directly. The vectorizer isn't generating
7388 those yet, however. */
7389 if (code == MISALIGNED_INDIRECT_REF)
7390 {
7391 int icode;
7392 rtx reg, insn;
7393
7394 gcc_assert (modifier == EXPAND_NORMAL
7395 || modifier == EXPAND_STACK_PARM);
7396
7397 /* The vectorizer should have already checked the mode. */
7398 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7399 gcc_assert (icode != CODE_FOR_nothing);
7400
7401 /* We've already validated the memory, and we're creating a
7402 new pseudo destination. The predicates really can't fail. */
7403 reg = gen_reg_rtx (mode);
7404
7405 /* Nor can the insn generator. */
7406 insn = GEN_FCN (icode) (reg, temp);
7407 emit_insn (insn);
7408
7409 return reg;
7410 }
7411
7412 return temp;
7413 }
7414
7415 case TARGET_MEM_REF:
7416 {
7417 struct mem_address addr;
7418
7419 get_address_description (exp, &addr);
7420 op0 = addr_for_mem_ref (&addr, true);
7421 op0 = memory_address (mode, op0);
7422 temp = gen_rtx_MEM (mode, op0);
7423 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7424 }
7425 return temp;
7426
7427 case ARRAY_REF:
7428
7429 {
7430 tree array = TREE_OPERAND (exp, 0);
7431 tree index = TREE_OPERAND (exp, 1);
7432
7433 /* Fold an expression like: "foo"[2].
7434 This is not done in fold so it won't happen inside &.
7435 Don't fold if this is for wide characters since it's too
7436 difficult to do correctly and this is a very rare case. */
7437
7438 if (modifier != EXPAND_CONST_ADDRESS
7439 && modifier != EXPAND_INITIALIZER
7440 && modifier != EXPAND_MEMORY)
7441 {
7442 tree t = fold_read_from_constant_string (exp);
7443
7444 if (t)
7445 return expand_expr (t, target, tmode, modifier);
7446 }
7447
7448 /* If this is a constant index into a constant array,
7449 just get the value from the array. Handle both the cases when
7450 we have an explicit constructor and when our operand is a variable
7451 that was declared const. */
7452
7453 if (modifier != EXPAND_CONST_ADDRESS
7454 && modifier != EXPAND_INITIALIZER
7455 && modifier != EXPAND_MEMORY
7456 && TREE_CODE (array) == CONSTRUCTOR
7457 && ! TREE_SIDE_EFFECTS (array)
7458 && TREE_CODE (index) == INTEGER_CST)
7459 {
7460 unsigned HOST_WIDE_INT ix;
7461 tree field, value;
7462
7463 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7464 field, value)
7465 if (tree_int_cst_equal (field, index))
7466 {
7467 if (!TREE_SIDE_EFFECTS (value))
7468 return expand_expr (fold (value), target, tmode, modifier);
7469 break;
7470 }
7471 }
7472
7473 else if (optimize >= 1
7474 && modifier != EXPAND_CONST_ADDRESS
7475 && modifier != EXPAND_INITIALIZER
7476 && modifier != EXPAND_MEMORY
7477 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7478 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7479 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7480 && targetm.binds_local_p (array))
7481 {
7482 if (TREE_CODE (index) == INTEGER_CST)
7483 {
7484 tree init = DECL_INITIAL (array);
7485
7486 if (TREE_CODE (init) == CONSTRUCTOR)
7487 {
7488 unsigned HOST_WIDE_INT ix;
7489 tree field, value;
7490
7491 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7492 field, value)
7493 if (tree_int_cst_equal (field, index))
7494 {
7495 if (!TREE_SIDE_EFFECTS (value))
7496 return expand_expr (fold (value), target, tmode,
7497 modifier);
7498 break;
7499 }
7500 }
7501 else if(TREE_CODE (init) == STRING_CST)
7502 {
7503 tree index1 = index;
7504 tree low_bound = array_ref_low_bound (exp);
7505 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7506
7507 /* Optimize the special-case of a zero lower bound.
7508
7509 We convert the low_bound to sizetype to avoid some problems
7510 with constant folding. (E.g. suppose the lower bound is 1,
7511 and its mode is QI. Without the conversion,l (ARRAY
7512 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7513 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7514
7515 if (! integer_zerop (low_bound))
7516 index1 = size_diffop (index1, fold_convert (sizetype,
7517 low_bound));
7518
7519 if (0 > compare_tree_int (index1,
7520 TREE_STRING_LENGTH (init)))
7521 {
7522 tree type = TREE_TYPE (TREE_TYPE (init));
7523 enum machine_mode mode = TYPE_MODE (type);
7524
7525 if (GET_MODE_CLASS (mode) == MODE_INT
7526 && GET_MODE_SIZE (mode) == 1)
7527 return gen_int_mode (TREE_STRING_POINTER (init)
7528 [TREE_INT_CST_LOW (index1)],
7529 mode);
7530 }
7531 }
7532 }
7533 }
7534 }
7535 goto normal_inner_ref;
7536
7537 case COMPONENT_REF:
7538 /* If the operand is a CONSTRUCTOR, we can just extract the
7539 appropriate field if it is present. */
7540 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7541 {
7542 unsigned HOST_WIDE_INT idx;
7543 tree field, value;
7544
7545 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7546 idx, field, value)
7547 if (field == TREE_OPERAND (exp, 1)
7548 /* We can normally use the value of the field in the
7549 CONSTRUCTOR. However, if this is a bitfield in
7550 an integral mode that we can fit in a HOST_WIDE_INT,
7551 we must mask only the number of bits in the bitfield,
7552 since this is done implicitly by the constructor. If
7553 the bitfield does not meet either of those conditions,
7554 we can't do this optimization. */
7555 && (! DECL_BIT_FIELD (field)
7556 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7557 && (GET_MODE_BITSIZE (DECL_MODE (field))
7558 <= HOST_BITS_PER_WIDE_INT))))
7559 {
7560 if (DECL_BIT_FIELD (field)
7561 && modifier == EXPAND_STACK_PARM)
7562 target = 0;
7563 op0 = expand_expr (value, target, tmode, modifier);
7564 if (DECL_BIT_FIELD (field))
7565 {
7566 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7567 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7568
7569 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7570 {
7571 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7572 op0 = expand_and (imode, op0, op1, target);
7573 }
7574 else
7575 {
7576 tree count
7577 = build_int_cst (NULL_TREE,
7578 GET_MODE_BITSIZE (imode) - bitsize);
7579
7580 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7581 target, 0);
7582 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7583 target, 0);
7584 }
7585 }
7586
7587 return op0;
7588 }
7589 }
7590 goto normal_inner_ref;
7591
7592 case BIT_FIELD_REF:
7593 case ARRAY_RANGE_REF:
7594 normal_inner_ref:
7595 {
7596 enum machine_mode mode1;
7597 HOST_WIDE_INT bitsize, bitpos;
7598 tree offset;
7599 int volatilep = 0;
7600 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7601 &mode1, &unsignedp, &volatilep, true);
7602 rtx orig_op0;
7603
7604 /* If we got back the original object, something is wrong. Perhaps
7605 we are evaluating an expression too early. In any event, don't
7606 infinitely recurse. */
7607 gcc_assert (tem != exp);
7608
7609 /* If TEM's type is a union of variable size, pass TARGET to the inner
7610 computation, since it will need a temporary and TARGET is known
7611 to have to do. This occurs in unchecked conversion in Ada. */
7612
7613 orig_op0 = op0
7614 = expand_expr (tem,
7615 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7616 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7617 != INTEGER_CST)
7618 && modifier != EXPAND_STACK_PARM
7619 ? target : NULL_RTX),
7620 VOIDmode,
7621 (modifier == EXPAND_INITIALIZER
7622 || modifier == EXPAND_CONST_ADDRESS
7623 || modifier == EXPAND_STACK_PARM)
7624 ? modifier : EXPAND_NORMAL);
7625
7626 /* If this is a constant, put it into a register if it is a legitimate
7627 constant, OFFSET is 0, and we won't try to extract outside the
7628 register (in case we were passed a partially uninitialized object
7629 or a view_conversion to a larger size). Force the constant to
7630 memory otherwise. */
7631 if (CONSTANT_P (op0))
7632 {
7633 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7634 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7635 && offset == 0
7636 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7637 op0 = force_reg (mode, op0);
7638 else
7639 op0 = validize_mem (force_const_mem (mode, op0));
7640 }
7641
7642 /* Otherwise, if this object not in memory and we either have an
7643 offset, a BLKmode result, or a reference outside the object, put it
7644 there. Such cases can occur in Ada if we have unchecked conversion
7645 of an expression from a scalar type to an array or record type or
7646 for an ARRAY_RANGE_REF whose type is BLKmode. */
7647 else if (!MEM_P (op0)
7648 && (offset != 0
7649 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7650 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7651 {
7652 tree nt = build_qualified_type (TREE_TYPE (tem),
7653 (TYPE_QUALS (TREE_TYPE (tem))
7654 | TYPE_QUAL_CONST));
7655 rtx memloc = assign_temp (nt, 1, 1, 1);
7656
7657 emit_move_insn (memloc, op0);
7658 op0 = memloc;
7659 }
7660
7661 if (offset != 0)
7662 {
7663 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7664 EXPAND_SUM);
7665
7666 gcc_assert (MEM_P (op0));
7667
7668 #ifdef POINTERS_EXTEND_UNSIGNED
7669 if (GET_MODE (offset_rtx) != Pmode)
7670 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7671 #else
7672 if (GET_MODE (offset_rtx) != ptr_mode)
7673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7674 #endif
7675
7676 if (GET_MODE (op0) == BLKmode
7677 /* A constant address in OP0 can have VOIDmode, we must
7678 not try to call force_reg in that case. */
7679 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7680 && bitsize != 0
7681 && (bitpos % bitsize) == 0
7682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7683 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7684 {
7685 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7686 bitpos = 0;
7687 }
7688
7689 op0 = offset_address (op0, offset_rtx,
7690 highest_pow2_factor (offset));
7691 }
7692
7693 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7694 record its alignment as BIGGEST_ALIGNMENT. */
7695 if (MEM_P (op0) && bitpos == 0 && offset != 0
7696 && is_aligning_offset (offset, tem))
7697 set_mem_align (op0, BIGGEST_ALIGNMENT);
7698
7699 /* Don't forget about volatility even if this is a bitfield. */
7700 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7701 {
7702 if (op0 == orig_op0)
7703 op0 = copy_rtx (op0);
7704
7705 MEM_VOLATILE_P (op0) = 1;
7706 }
7707
7708 /* The following code doesn't handle CONCAT.
7709 Assume only bitpos == 0 can be used for CONCAT, due to
7710 one element arrays having the same mode as its element. */
7711 if (GET_CODE (op0) == CONCAT)
7712 {
7713 gcc_assert (bitpos == 0
7714 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7715 return op0;
7716 }
7717
7718 /* In cases where an aligned union has an unaligned object
7719 as a field, we might be extracting a BLKmode value from
7720 an integer-mode (e.g., SImode) object. Handle this case
7721 by doing the extract into an object as wide as the field
7722 (which we know to be the width of a basic mode), then
7723 storing into memory, and changing the mode to BLKmode. */
7724 if (mode1 == VOIDmode
7725 || REG_P (op0) || GET_CODE (op0) == SUBREG
7726 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7727 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7728 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7729 && modifier != EXPAND_CONST_ADDRESS
7730 && modifier != EXPAND_INITIALIZER)
7731 /* If the field isn't aligned enough to fetch as a memref,
7732 fetch it as a bit field. */
7733 || (mode1 != BLKmode
7734 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7735 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7736 || (MEM_P (op0)
7737 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7738 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7739 && ((modifier == EXPAND_CONST_ADDRESS
7740 || modifier == EXPAND_INITIALIZER)
7741 ? STRICT_ALIGNMENT
7742 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7743 || (bitpos % BITS_PER_UNIT != 0)))
7744 /* If the type and the field are a constant size and the
7745 size of the type isn't the same size as the bitfield,
7746 we must use bitfield operations. */
7747 || (bitsize >= 0
7748 && TYPE_SIZE (TREE_TYPE (exp))
7749 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7750 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7751 bitsize)))
7752 {
7753 enum machine_mode ext_mode = mode;
7754
7755 if (ext_mode == BLKmode
7756 && ! (target != 0 && MEM_P (op0)
7757 && MEM_P (target)
7758 && bitpos % BITS_PER_UNIT == 0))
7759 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7760
7761 if (ext_mode == BLKmode)
7762 {
7763 if (target == 0)
7764 target = assign_temp (type, 0, 1, 1);
7765
7766 if (bitsize == 0)
7767 return target;
7768
7769 /* In this case, BITPOS must start at a byte boundary and
7770 TARGET, if specified, must be a MEM. */
7771 gcc_assert (MEM_P (op0)
7772 && (!target || MEM_P (target))
7773 && !(bitpos % BITS_PER_UNIT));
7774
7775 emit_block_move (target,
7776 adjust_address (op0, VOIDmode,
7777 bitpos / BITS_PER_UNIT),
7778 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7779 / BITS_PER_UNIT),
7780 (modifier == EXPAND_STACK_PARM
7781 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7782
7783 return target;
7784 }
7785
7786 op0 = validize_mem (op0);
7787
7788 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7789 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7790
7791 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7792 (modifier == EXPAND_STACK_PARM
7793 ? NULL_RTX : target),
7794 ext_mode, ext_mode);
7795
7796 /* If the result is a record type and BITSIZE is narrower than
7797 the mode of OP0, an integral mode, and this is a big endian
7798 machine, we must put the field into the high-order bits. */
7799 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7800 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7801 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7802 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7803 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7804 - bitsize),
7805 op0, 1);
7806
7807 /* If the result type is BLKmode, store the data into a temporary
7808 of the appropriate type, but with the mode corresponding to the
7809 mode for the data we have (op0's mode). It's tempting to make
7810 this a constant type, since we know it's only being stored once,
7811 but that can cause problems if we are taking the address of this
7812 COMPONENT_REF because the MEM of any reference via that address
7813 will have flags corresponding to the type, which will not
7814 necessarily be constant. */
7815 if (mode == BLKmode)
7816 {
7817 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7818 rtx new;
7819
7820 /* If the reference doesn't use the alias set of its type,
7821 we cannot create the temporary using that type. */
7822 if (component_uses_parent_alias_set (exp))
7823 {
7824 new = assign_stack_local (ext_mode, size, 0);
7825 set_mem_alias_set (new, get_alias_set (exp));
7826 }
7827 else
7828 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7829
7830 emit_move_insn (new, op0);
7831 op0 = copy_rtx (new);
7832 PUT_MODE (op0, BLKmode);
7833 set_mem_attributes (op0, exp, 1);
7834 }
7835
7836 return op0;
7837 }
7838
7839 /* If the result is BLKmode, use that to access the object
7840 now as well. */
7841 if (mode == BLKmode)
7842 mode1 = BLKmode;
7843
7844 /* Get a reference to just this component. */
7845 if (modifier == EXPAND_CONST_ADDRESS
7846 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7847 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7848 else
7849 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7850
7851 if (op0 == orig_op0)
7852 op0 = copy_rtx (op0);
7853
7854 set_mem_attributes (op0, exp, 0);
7855 if (REG_P (XEXP (op0, 0)))
7856 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7857
7858 MEM_VOLATILE_P (op0) |= volatilep;
7859 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7860 || modifier == EXPAND_CONST_ADDRESS
7861 || modifier == EXPAND_INITIALIZER)
7862 return op0;
7863 else if (target == 0)
7864 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7865
7866 convert_move (target, op0, unsignedp);
7867 return target;
7868 }
7869
7870 case OBJ_TYPE_REF:
7871 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7872
7873 case CALL_EXPR:
7874 /* Check for a built-in function. */
7875 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7876 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7877 == FUNCTION_DECL)
7878 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7879 {
7880 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7881 == BUILT_IN_FRONTEND)
7882 return lang_hooks.expand_expr (exp, original_target,
7883 tmode, modifier,
7884 alt_rtl);
7885 else
7886 return expand_builtin (exp, target, subtarget, tmode, ignore);
7887 }
7888
7889 return expand_call (exp, target, ignore);
7890
7891 case NON_LVALUE_EXPR:
7892 case NOP_EXPR:
7893 case CONVERT_EXPR:
7894 if (TREE_OPERAND (exp, 0) == error_mark_node)
7895 return const0_rtx;
7896
7897 if (TREE_CODE (type) == UNION_TYPE)
7898 {
7899 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7900
7901 /* If both input and output are BLKmode, this conversion isn't doing
7902 anything except possibly changing memory attribute. */
7903 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7904 {
7905 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7906 modifier);
7907
7908 result = copy_rtx (result);
7909 set_mem_attributes (result, exp, 0);
7910 return result;
7911 }
7912
7913 if (target == 0)
7914 {
7915 if (TYPE_MODE (type) != BLKmode)
7916 target = gen_reg_rtx (TYPE_MODE (type));
7917 else
7918 target = assign_temp (type, 0, 1, 1);
7919 }
7920
7921 if (MEM_P (target))
7922 /* Store data into beginning of memory target. */
7923 store_expr (TREE_OPERAND (exp, 0),
7924 adjust_address (target, TYPE_MODE (valtype), 0),
7925 modifier == EXPAND_STACK_PARM,
7926 false);
7927
7928 else
7929 {
7930 gcc_assert (REG_P (target));
7931
7932 /* Store this field into a union of the proper type. */
7933 store_field (target,
7934 MIN ((int_size_in_bytes (TREE_TYPE
7935 (TREE_OPERAND (exp, 0)))
7936 * BITS_PER_UNIT),
7937 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7938 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7939 type, 0, false);
7940 }
7941
7942 /* Return the entire union. */
7943 return target;
7944 }
7945
7946 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7947 {
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7949 modifier);
7950
7951 /* If the signedness of the conversion differs and OP0 is
7952 a promoted SUBREG, clear that indication since we now
7953 have to do the proper extension. */
7954 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7955 && GET_CODE (op0) == SUBREG)
7956 SUBREG_PROMOTED_VAR_P (op0) = 0;
7957
7958 return REDUCE_BIT_FIELD (op0);
7959 }
7960
7961 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7962 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7963 if (GET_MODE (op0) == mode)
7964 ;
7965
7966 /* If OP0 is a constant, just convert it into the proper mode. */
7967 else if (CONSTANT_P (op0))
7968 {
7969 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7970 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7971
7972 if (modifier == EXPAND_INITIALIZER)
7973 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7974 subreg_lowpart_offset (mode,
7975 inner_mode));
7976 else
7977 op0= convert_modes (mode, inner_mode, op0,
7978 TYPE_UNSIGNED (inner_type));
7979 }
7980
7981 else if (modifier == EXPAND_INITIALIZER)
7982 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7983
7984 else if (target == 0)
7985 op0 = convert_to_mode (mode, op0,
7986 TYPE_UNSIGNED (TREE_TYPE
7987 (TREE_OPERAND (exp, 0))));
7988 else
7989 {
7990 convert_move (target, op0,
7991 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7992 op0 = target;
7993 }
7994
7995 return REDUCE_BIT_FIELD (op0);
7996
7997 case VIEW_CONVERT_EXPR:
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7999
8000 /* If the input and output modes are both the same, we are done. */
8001 if (TYPE_MODE (type) == GET_MODE (op0))
8002 ;
8003 /* If neither mode is BLKmode, and both modes are the same size
8004 then we can use gen_lowpart. */
8005 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8006 && GET_MODE_SIZE (TYPE_MODE (type))
8007 == GET_MODE_SIZE (GET_MODE (op0)))
8008 {
8009 if (GET_CODE (op0) == SUBREG)
8010 op0 = force_reg (GET_MODE (op0), op0);
8011 op0 = gen_lowpart (TYPE_MODE (type), op0);
8012 }
8013 /* If both modes are integral, then we can convert from one to the
8014 other. */
8015 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8016 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8017 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8018 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8019 /* As a last resort, spill op0 to memory, and reload it in a
8020 different mode. */
8021 else if (!MEM_P (op0))
8022 {
8023 /* If the operand is not a MEM, force it into memory. Since we
8024 are going to be changing the mode of the MEM, don't call
8025 force_const_mem for constants because we don't allow pool
8026 constants to change mode. */
8027 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8028
8029 gcc_assert (!TREE_ADDRESSABLE (exp));
8030
8031 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8032 target
8033 = assign_stack_temp_for_type
8034 (TYPE_MODE (inner_type),
8035 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8036
8037 emit_move_insn (target, op0);
8038 op0 = target;
8039 }
8040
8041 /* At this point, OP0 is in the correct mode. If the output type is such
8042 that the operand is known to be aligned, indicate that it is.
8043 Otherwise, we need only be concerned about alignment for non-BLKmode
8044 results. */
8045 if (MEM_P (op0))
8046 {
8047 op0 = copy_rtx (op0);
8048
8049 if (TYPE_ALIGN_OK (type))
8050 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8051 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8052 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8053 {
8054 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8055 HOST_WIDE_INT temp_size
8056 = MAX (int_size_in_bytes (inner_type),
8057 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8058 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8059 temp_size, 0, type);
8060 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8061
8062 gcc_assert (!TREE_ADDRESSABLE (exp));
8063
8064 if (GET_MODE (op0) == BLKmode)
8065 emit_block_move (new_with_op0_mode, op0,
8066 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8067 (modifier == EXPAND_STACK_PARM
8068 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8069 else
8070 emit_move_insn (new_with_op0_mode, op0);
8071
8072 op0 = new;
8073 }
8074
8075 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8076 }
8077
8078 return op0;
8079
8080 case POINTER_PLUS_EXPR:
8081 /* Even though the sizetype mode and the pointer's mode can be different
8082 expand is able to handle this correctly and get the correct result out
8083 of the PLUS_EXPR code. */
8084 case PLUS_EXPR:
8085
8086 /* Check if this is a case for multiplication and addition. */
8087 if (TREE_CODE (type) == INTEGER_TYPE
8088 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8089 {
8090 tree subsubexp0, subsubexp1;
8091 enum tree_code code0, code1;
8092
8093 subexp0 = TREE_OPERAND (exp, 0);
8094 subsubexp0 = TREE_OPERAND (subexp0, 0);
8095 subsubexp1 = TREE_OPERAND (subexp0, 1);
8096 code0 = TREE_CODE (subsubexp0);
8097 code1 = TREE_CODE (subsubexp1);
8098 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8099 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8100 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8101 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8102 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8103 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8104 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8105 {
8106 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8107 enum machine_mode innermode = TYPE_MODE (op0type);
8108 bool zextend_p = TYPE_UNSIGNED (op0type);
8109 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8110 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8111 && (optab_handler (this_optab, mode)->insn_code
8112 != CODE_FOR_nothing))
8113 {
8114 expand_operands (TREE_OPERAND (subsubexp0, 0),
8115 TREE_OPERAND (subsubexp1, 0),
8116 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8117 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8118 VOIDmode, EXPAND_NORMAL);
8119 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8120 target, unsignedp);
8121 gcc_assert (temp);
8122 return REDUCE_BIT_FIELD (temp);
8123 }
8124 }
8125 }
8126
8127 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8128 something else, make sure we add the register to the constant and
8129 then to the other thing. This case can occur during strength
8130 reduction and doing it this way will produce better code if the
8131 frame pointer or argument pointer is eliminated.
8132
8133 fold-const.c will ensure that the constant is always in the inner
8134 PLUS_EXPR, so the only case we need to do anything about is if
8135 sp, ap, or fp is our second argument, in which case we must swap
8136 the innermost first argument and our second argument. */
8137
8138 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8139 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8140 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8141 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8142 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8143 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8144 {
8145 tree t = TREE_OPERAND (exp, 1);
8146
8147 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8148 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8149 }
8150
8151 /* If the result is to be ptr_mode and we are adding an integer to
8152 something, we might be forming a constant. So try to use
8153 plus_constant. If it produces a sum and we can't accept it,
8154 use force_operand. This allows P = &ARR[const] to generate
8155 efficient code on machines where a SYMBOL_REF is not a valid
8156 address.
8157
8158 If this is an EXPAND_SUM call, always return the sum. */
8159 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8160 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8161 {
8162 if (modifier == EXPAND_STACK_PARM)
8163 target = 0;
8164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8165 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8166 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8167 {
8168 rtx constant_part;
8169
8170 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8171 EXPAND_SUM);
8172 /* Use immed_double_const to ensure that the constant is
8173 truncated according to the mode of OP1, then sign extended
8174 to a HOST_WIDE_INT. Using the constant directly can result
8175 in non-canonical RTL in a 64x32 cross compile. */
8176 constant_part
8177 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8178 (HOST_WIDE_INT) 0,
8179 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8180 op1 = plus_constant (op1, INTVAL (constant_part));
8181 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8182 op1 = force_operand (op1, target);
8183 return REDUCE_BIT_FIELD (op1);
8184 }
8185
8186 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8187 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8188 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8189 {
8190 rtx constant_part;
8191
8192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8193 (modifier == EXPAND_INITIALIZER
8194 ? EXPAND_INITIALIZER : EXPAND_SUM));
8195 if (! CONSTANT_P (op0))
8196 {
8197 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8198 VOIDmode, modifier);
8199 /* Return a PLUS if modifier says it's OK. */
8200 if (modifier == EXPAND_SUM
8201 || modifier == EXPAND_INITIALIZER)
8202 return simplify_gen_binary (PLUS, mode, op0, op1);
8203 goto binop2;
8204 }
8205 /* Use immed_double_const to ensure that the constant is
8206 truncated according to the mode of OP1, then sign extended
8207 to a HOST_WIDE_INT. Using the constant directly can result
8208 in non-canonical RTL in a 64x32 cross compile. */
8209 constant_part
8210 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8211 (HOST_WIDE_INT) 0,
8212 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8213 op0 = plus_constant (op0, INTVAL (constant_part));
8214 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8215 op0 = force_operand (op0, target);
8216 return REDUCE_BIT_FIELD (op0);
8217 }
8218 }
8219
8220 /* No sense saving up arithmetic to be done
8221 if it's all in the wrong mode to form part of an address.
8222 And force_operand won't know whether to sign-extend or
8223 zero-extend. */
8224 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8225 || mode != ptr_mode)
8226 {
8227 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8228 subtarget, &op0, &op1, 0);
8229 if (op0 == const0_rtx)
8230 return op1;
8231 if (op1 == const0_rtx)
8232 return op0;
8233 goto binop2;
8234 }
8235
8236 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8237 subtarget, &op0, &op1, modifier);
8238 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8239
8240 case MINUS_EXPR:
8241 /* Check if this is a case for multiplication and subtraction. */
8242 if (TREE_CODE (type) == INTEGER_TYPE
8243 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8244 {
8245 tree subsubexp0, subsubexp1;
8246 enum tree_code code0, code1;
8247
8248 subexp1 = TREE_OPERAND (exp, 1);
8249 subsubexp0 = TREE_OPERAND (subexp1, 0);
8250 subsubexp1 = TREE_OPERAND (subexp1, 1);
8251 code0 = TREE_CODE (subsubexp0);
8252 code1 = TREE_CODE (subsubexp1);
8253 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8254 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8255 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8256 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8257 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8258 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8259 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8260 {
8261 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8262 enum machine_mode innermode = TYPE_MODE (op0type);
8263 bool zextend_p = TYPE_UNSIGNED (op0type);
8264 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8265 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8266 && (optab_handler (this_optab, mode)->insn_code
8267 != CODE_FOR_nothing))
8268 {
8269 expand_operands (TREE_OPERAND (subsubexp0, 0),
8270 TREE_OPERAND (subsubexp1, 0),
8271 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8272 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8273 VOIDmode, EXPAND_NORMAL);
8274 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8275 target, unsignedp);
8276 gcc_assert (temp);
8277 return REDUCE_BIT_FIELD (temp);
8278 }
8279 }
8280 }
8281
8282 /* For initializers, we are allowed to return a MINUS of two
8283 symbolic constants. Here we handle all cases when both operands
8284 are constant. */
8285 /* Handle difference of two symbolic constants,
8286 for the sake of an initializer. */
8287 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8288 && really_constant_p (TREE_OPERAND (exp, 0))
8289 && really_constant_p (TREE_OPERAND (exp, 1)))
8290 {
8291 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8292 NULL_RTX, &op0, &op1, modifier);
8293
8294 /* If the last operand is a CONST_INT, use plus_constant of
8295 the negated constant. Else make the MINUS. */
8296 if (GET_CODE (op1) == CONST_INT)
8297 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8298 else
8299 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8300 }
8301
8302 /* No sense saving up arithmetic to be done
8303 if it's all in the wrong mode to form part of an address.
8304 And force_operand won't know whether to sign-extend or
8305 zero-extend. */
8306 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8307 || mode != ptr_mode)
8308 goto binop;
8309
8310 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8311 subtarget, &op0, &op1, modifier);
8312
8313 /* Convert A - const to A + (-const). */
8314 if (GET_CODE (op1) == CONST_INT)
8315 {
8316 op1 = negate_rtx (mode, op1);
8317 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8318 }
8319
8320 goto binop2;
8321
8322 case MULT_EXPR:
8323 /* If first operand is constant, swap them.
8324 Thus the following special case checks need only
8325 check the second operand. */
8326 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8327 {
8328 tree t1 = TREE_OPERAND (exp, 0);
8329 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8330 TREE_OPERAND (exp, 1) = t1;
8331 }
8332
8333 /* Attempt to return something suitable for generating an
8334 indexed address, for machines that support that. */
8335
8336 if (modifier == EXPAND_SUM && mode == ptr_mode
8337 && host_integerp (TREE_OPERAND (exp, 1), 0))
8338 {
8339 tree exp1 = TREE_OPERAND (exp, 1);
8340
8341 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8342 EXPAND_SUM);
8343
8344 if (!REG_P (op0))
8345 op0 = force_operand (op0, NULL_RTX);
8346 if (!REG_P (op0))
8347 op0 = copy_to_mode_reg (mode, op0);
8348
8349 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8350 gen_int_mode (tree_low_cst (exp1, 0),
8351 TYPE_MODE (TREE_TYPE (exp1)))));
8352 }
8353
8354 if (modifier == EXPAND_STACK_PARM)
8355 target = 0;
8356
8357 /* Check for multiplying things that have been extended
8358 from a narrower type. If this machine supports multiplying
8359 in that narrower type with a result in the desired type,
8360 do it that way, and avoid the explicit type-conversion. */
8361
8362 subexp0 = TREE_OPERAND (exp, 0);
8363 subexp1 = TREE_OPERAND (exp, 1);
8364 /* First, check if we have a multiplication of one signed and one
8365 unsigned operand. */
8366 if (TREE_CODE (subexp0) == NOP_EXPR
8367 && TREE_CODE (subexp1) == NOP_EXPR
8368 && TREE_CODE (type) == INTEGER_TYPE
8369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8370 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8371 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8372 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8373 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8374 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8375 {
8376 enum machine_mode innermode
8377 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8378 this_optab = usmul_widen_optab;
8379 if (mode == GET_MODE_WIDER_MODE (innermode))
8380 {
8381 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8382 {
8383 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8384 expand_operands (TREE_OPERAND (subexp0, 0),
8385 TREE_OPERAND (subexp1, 0),
8386 NULL_RTX, &op0, &op1, 0);
8387 else
8388 expand_operands (TREE_OPERAND (subexp0, 0),
8389 TREE_OPERAND (subexp1, 0),
8390 NULL_RTX, &op1, &op0, 0);
8391
8392 goto binop3;
8393 }
8394 }
8395 }
8396 /* Check for a multiplication with matching signedness. */
8397 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8398 && TREE_CODE (type) == INTEGER_TYPE
8399 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8400 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8401 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8402 && int_fits_type_p (TREE_OPERAND (exp, 1),
8403 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8404 /* Don't use a widening multiply if a shift will do. */
8405 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8406 > HOST_BITS_PER_WIDE_INT)
8407 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8408 ||
8409 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8410 && (TYPE_PRECISION (TREE_TYPE
8411 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8412 == TYPE_PRECISION (TREE_TYPE
8413 (TREE_OPERAND
8414 (TREE_OPERAND (exp, 0), 0))))
8415 /* If both operands are extended, they must either both
8416 be zero-extended or both be sign-extended. */
8417 && (TYPE_UNSIGNED (TREE_TYPE
8418 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8419 == TYPE_UNSIGNED (TREE_TYPE
8420 (TREE_OPERAND
8421 (TREE_OPERAND (exp, 0), 0)))))))
8422 {
8423 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8424 enum machine_mode innermode = TYPE_MODE (op0type);
8425 bool zextend_p = TYPE_UNSIGNED (op0type);
8426 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8427 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8428
8429 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8430 {
8431 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8432 {
8433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8434 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8435 TREE_OPERAND (exp, 1),
8436 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8437 else
8438 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8439 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8440 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8441 goto binop3;
8442 }
8443 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8444 && innermode == word_mode)
8445 {
8446 rtx htem, hipart;
8447 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8448 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8449 op1 = convert_modes (innermode, mode,
8450 expand_normal (TREE_OPERAND (exp, 1)),
8451 unsignedp);
8452 else
8453 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8454 temp = expand_binop (mode, other_optab, op0, op1, target,
8455 unsignedp, OPTAB_LIB_WIDEN);
8456 hipart = gen_highpart (innermode, temp);
8457 htem = expand_mult_highpart_adjust (innermode, hipart,
8458 op0, op1, hipart,
8459 zextend_p);
8460 if (htem != hipart)
8461 emit_move_insn (hipart, htem);
8462 return REDUCE_BIT_FIELD (temp);
8463 }
8464 }
8465 }
8466 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8467 subtarget, &op0, &op1, 0);
8468 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8469
8470 case TRUNC_DIV_EXPR:
8471 case FLOOR_DIV_EXPR:
8472 case CEIL_DIV_EXPR:
8473 case ROUND_DIV_EXPR:
8474 case EXACT_DIV_EXPR:
8475 if (modifier == EXPAND_STACK_PARM)
8476 target = 0;
8477 /* Possible optimization: compute the dividend with EXPAND_SUM
8478 then if the divisor is constant can optimize the case
8479 where some terms of the dividend have coeffs divisible by it. */
8480 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8481 subtarget, &op0, &op1, 0);
8482 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8483
8484 case RDIV_EXPR:
8485 goto binop;
8486
8487 case TRUNC_MOD_EXPR:
8488 case FLOOR_MOD_EXPR:
8489 case CEIL_MOD_EXPR:
8490 case ROUND_MOD_EXPR:
8491 if (modifier == EXPAND_STACK_PARM)
8492 target = 0;
8493 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8494 subtarget, &op0, &op1, 0);
8495 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8496
8497 case FIX_TRUNC_EXPR:
8498 op0 = expand_normal (TREE_OPERAND (exp, 0));
8499 if (target == 0 || modifier == EXPAND_STACK_PARM)
8500 target = gen_reg_rtx (mode);
8501 expand_fix (target, op0, unsignedp);
8502 return target;
8503
8504 case FLOAT_EXPR:
8505 op0 = expand_normal (TREE_OPERAND (exp, 0));
8506 if (target == 0 || modifier == EXPAND_STACK_PARM)
8507 target = gen_reg_rtx (mode);
8508 /* expand_float can't figure out what to do if FROM has VOIDmode.
8509 So give it the correct mode. With -O, cse will optimize this. */
8510 if (GET_MODE (op0) == VOIDmode)
8511 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8512 op0);
8513 expand_float (target, op0,
8514 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8515 return target;
8516
8517 case NEGATE_EXPR:
8518 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8519 VOIDmode, EXPAND_NORMAL);
8520 if (modifier == EXPAND_STACK_PARM)
8521 target = 0;
8522 temp = expand_unop (mode,
8523 optab_for_tree_code (NEGATE_EXPR, type),
8524 op0, target, 0);
8525 gcc_assert (temp);
8526 return REDUCE_BIT_FIELD (temp);
8527
8528 case ABS_EXPR:
8529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8530 VOIDmode, EXPAND_NORMAL);
8531 if (modifier == EXPAND_STACK_PARM)
8532 target = 0;
8533
8534 /* ABS_EXPR is not valid for complex arguments. */
8535 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8536 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8537
8538 /* Unsigned abs is simply the operand. Testing here means we don't
8539 risk generating incorrect code below. */
8540 if (TYPE_UNSIGNED (type))
8541 return op0;
8542
8543 return expand_abs (mode, op0, target, unsignedp,
8544 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8545
8546 case MAX_EXPR:
8547 case MIN_EXPR:
8548 target = original_target;
8549 if (target == 0
8550 || modifier == EXPAND_STACK_PARM
8551 || (MEM_P (target) && MEM_VOLATILE_P (target))
8552 || GET_MODE (target) != mode
8553 || (REG_P (target)
8554 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8555 target = gen_reg_rtx (mode);
8556 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8557 target, &op0, &op1, 0);
8558
8559 /* First try to do it with a special MIN or MAX instruction.
8560 If that does not win, use a conditional jump to select the proper
8561 value. */
8562 this_optab = optab_for_tree_code (code, type);
8563 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8564 OPTAB_WIDEN);
8565 if (temp != 0)
8566 return temp;
8567
8568 /* At this point, a MEM target is no longer useful; we will get better
8569 code without it. */
8570
8571 if (! REG_P (target))
8572 target = gen_reg_rtx (mode);
8573
8574 /* If op1 was placed in target, swap op0 and op1. */
8575 if (target != op0 && target == op1)
8576 {
8577 temp = op0;
8578 op0 = op1;
8579 op1 = temp;
8580 }
8581
8582 /* We generate better code and avoid problems with op1 mentioning
8583 target by forcing op1 into a pseudo if it isn't a constant. */
8584 if (! CONSTANT_P (op1))
8585 op1 = force_reg (mode, op1);
8586
8587 {
8588 enum rtx_code comparison_code;
8589 rtx cmpop1 = op1;
8590
8591 if (code == MAX_EXPR)
8592 comparison_code = unsignedp ? GEU : GE;
8593 else
8594 comparison_code = unsignedp ? LEU : LE;
8595
8596 /* Canonicalize to comparisons against 0. */
8597 if (op1 == const1_rtx)
8598 {
8599 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8600 or (a != 0 ? a : 1) for unsigned.
8601 For MIN we are safe converting (a <= 1 ? a : 1)
8602 into (a <= 0 ? a : 1) */
8603 cmpop1 = const0_rtx;
8604 if (code == MAX_EXPR)
8605 comparison_code = unsignedp ? NE : GT;
8606 }
8607 if (op1 == constm1_rtx && !unsignedp)
8608 {
8609 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8610 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8611 cmpop1 = const0_rtx;
8612 if (code == MIN_EXPR)
8613 comparison_code = LT;
8614 }
8615 #ifdef HAVE_conditional_move
8616 /* Use a conditional move if possible. */
8617 if (can_conditionally_move_p (mode))
8618 {
8619 rtx insn;
8620
8621 /* ??? Same problem as in expmed.c: emit_conditional_move
8622 forces a stack adjustment via compare_from_rtx, and we
8623 lose the stack adjustment if the sequence we are about
8624 to create is discarded. */
8625 do_pending_stack_adjust ();
8626
8627 start_sequence ();
8628
8629 /* Try to emit the conditional move. */
8630 insn = emit_conditional_move (target, comparison_code,
8631 op0, cmpop1, mode,
8632 op0, op1, mode,
8633 unsignedp);
8634
8635 /* If we could do the conditional move, emit the sequence,
8636 and return. */
8637 if (insn)
8638 {
8639 rtx seq = get_insns ();
8640 end_sequence ();
8641 emit_insn (seq);
8642 return target;
8643 }
8644
8645 /* Otherwise discard the sequence and fall back to code with
8646 branches. */
8647 end_sequence ();
8648 }
8649 #endif
8650 if (target != op0)
8651 emit_move_insn (target, op0);
8652
8653 temp = gen_label_rtx ();
8654 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8655 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8656 }
8657 emit_move_insn (target, op1);
8658 emit_label (temp);
8659 return target;
8660
8661 case BIT_NOT_EXPR:
8662 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8663 VOIDmode, EXPAND_NORMAL);
8664 if (modifier == EXPAND_STACK_PARM)
8665 target = 0;
8666 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8667 gcc_assert (temp);
8668 return temp;
8669
8670 /* ??? Can optimize bitwise operations with one arg constant.
8671 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8672 and (a bitwise1 b) bitwise2 b (etc)
8673 but that is probably not worth while. */
8674
8675 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8676 boolean values when we want in all cases to compute both of them. In
8677 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8678 as actual zero-or-1 values and then bitwise anding. In cases where
8679 there cannot be any side effects, better code would be made by
8680 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8681 how to recognize those cases. */
8682
8683 case TRUTH_AND_EXPR:
8684 code = BIT_AND_EXPR;
8685 case BIT_AND_EXPR:
8686 goto binop;
8687
8688 case TRUTH_OR_EXPR:
8689 code = BIT_IOR_EXPR;
8690 case BIT_IOR_EXPR:
8691 goto binop;
8692
8693 case TRUTH_XOR_EXPR:
8694 code = BIT_XOR_EXPR;
8695 case BIT_XOR_EXPR:
8696 goto binop;
8697
8698 case LSHIFT_EXPR:
8699 case RSHIFT_EXPR:
8700 case LROTATE_EXPR:
8701 case RROTATE_EXPR:
8702 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8703 subtarget = 0;
8704 if (modifier == EXPAND_STACK_PARM)
8705 target = 0;
8706 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8707 VOIDmode, EXPAND_NORMAL);
8708 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8709 unsignedp);
8710
8711 /* Could determine the answer when only additive constants differ. Also,
8712 the addition of one can be handled by changing the condition. */
8713 case LT_EXPR:
8714 case LE_EXPR:
8715 case GT_EXPR:
8716 case GE_EXPR:
8717 case EQ_EXPR:
8718 case NE_EXPR:
8719 case UNORDERED_EXPR:
8720 case ORDERED_EXPR:
8721 case UNLT_EXPR:
8722 case UNLE_EXPR:
8723 case UNGT_EXPR:
8724 case UNGE_EXPR:
8725 case UNEQ_EXPR:
8726 case LTGT_EXPR:
8727 temp = do_store_flag (exp,
8728 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8729 tmode != VOIDmode ? tmode : mode, 0);
8730 if (temp != 0)
8731 return temp;
8732
8733 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8734 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8735 && original_target
8736 && REG_P (original_target)
8737 && (GET_MODE (original_target)
8738 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8739 {
8740 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8741 VOIDmode, EXPAND_NORMAL);
8742
8743 /* If temp is constant, we can just compute the result. */
8744 if (GET_CODE (temp) == CONST_INT)
8745 {
8746 if (INTVAL (temp) != 0)
8747 emit_move_insn (target, const1_rtx);
8748 else
8749 emit_move_insn (target, const0_rtx);
8750
8751 return target;
8752 }
8753
8754 if (temp != original_target)
8755 {
8756 enum machine_mode mode1 = GET_MODE (temp);
8757 if (mode1 == VOIDmode)
8758 mode1 = tmode != VOIDmode ? tmode : mode;
8759
8760 temp = copy_to_mode_reg (mode1, temp);
8761 }
8762
8763 op1 = gen_label_rtx ();
8764 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8765 GET_MODE (temp), unsignedp, op1);
8766 emit_move_insn (temp, const1_rtx);
8767 emit_label (op1);
8768 return temp;
8769 }
8770
8771 /* If no set-flag instruction, must generate a conditional store
8772 into a temporary variable. Drop through and handle this
8773 like && and ||. */
8774
8775 if (! ignore
8776 && (target == 0
8777 || modifier == EXPAND_STACK_PARM
8778 || ! safe_from_p (target, exp, 1)
8779 /* Make sure we don't have a hard reg (such as function's return
8780 value) live across basic blocks, if not optimizing. */
8781 || (!optimize && REG_P (target)
8782 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8783 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8784
8785 if (target)
8786 emit_move_insn (target, const0_rtx);
8787
8788 op1 = gen_label_rtx ();
8789 jumpifnot (exp, op1);
8790
8791 if (target)
8792 emit_move_insn (target, const1_rtx);
8793
8794 emit_label (op1);
8795 return ignore ? const0_rtx : target;
8796
8797 case TRUTH_NOT_EXPR:
8798 if (modifier == EXPAND_STACK_PARM)
8799 target = 0;
8800 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8801 VOIDmode, EXPAND_NORMAL);
8802 /* The parser is careful to generate TRUTH_NOT_EXPR
8803 only with operands that are always zero or one. */
8804 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8805 target, 1, OPTAB_LIB_WIDEN);
8806 gcc_assert (temp);
8807 return temp;
8808
8809 case STATEMENT_LIST:
8810 {
8811 tree_stmt_iterator iter;
8812
8813 gcc_assert (ignore);
8814
8815 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8816 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8817 }
8818 return const0_rtx;
8819
8820 case COND_EXPR:
8821 /* A COND_EXPR with its type being VOID_TYPE represents a
8822 conditional jump and is handled in
8823 expand_gimple_cond_expr. */
8824 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8825
8826 /* Note that COND_EXPRs whose type is a structure or union
8827 are required to be constructed to contain assignments of
8828 a temporary variable, so that we can evaluate them here
8829 for side effect only. If type is void, we must do likewise. */
8830
8831 gcc_assert (!TREE_ADDRESSABLE (type)
8832 && !ignore
8833 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8834 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8835
8836 /* If we are not to produce a result, we have no target. Otherwise,
8837 if a target was specified use it; it will not be used as an
8838 intermediate target unless it is safe. If no target, use a
8839 temporary. */
8840
8841 if (modifier != EXPAND_STACK_PARM
8842 && original_target
8843 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8844 && GET_MODE (original_target) == mode
8845 #ifdef HAVE_conditional_move
8846 && (! can_conditionally_move_p (mode)
8847 || REG_P (original_target))
8848 #endif
8849 && !MEM_P (original_target))
8850 temp = original_target;
8851 else
8852 temp = assign_temp (type, 0, 0, 1);
8853
8854 do_pending_stack_adjust ();
8855 NO_DEFER_POP;
8856 op0 = gen_label_rtx ();
8857 op1 = gen_label_rtx ();
8858 jumpifnot (TREE_OPERAND (exp, 0), op0);
8859 store_expr (TREE_OPERAND (exp, 1), temp,
8860 modifier == EXPAND_STACK_PARM,
8861 false);
8862
8863 emit_jump_insn (gen_jump (op1));
8864 emit_barrier ();
8865 emit_label (op0);
8866 store_expr (TREE_OPERAND (exp, 2), temp,
8867 modifier == EXPAND_STACK_PARM,
8868 false);
8869
8870 emit_label (op1);
8871 OK_DEFER_POP;
8872 return temp;
8873
8874 case VEC_COND_EXPR:
8875 target = expand_vec_cond_expr (exp, target);
8876 return target;
8877
8878 case MODIFY_EXPR:
8879 {
8880 tree lhs = TREE_OPERAND (exp, 0);
8881 tree rhs = TREE_OPERAND (exp, 1);
8882 gcc_assert (ignore);
8883 expand_assignment (lhs, rhs, false);
8884 return const0_rtx;
8885 }
8886
8887 case GIMPLE_MODIFY_STMT:
8888 {
8889 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8890 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8891
8892 gcc_assert (ignore);
8893
8894 /* Check for |= or &= of a bitfield of size one into another bitfield
8895 of size 1. In this case, (unless we need the result of the
8896 assignment) we can do this more efficiently with a
8897 test followed by an assignment, if necessary.
8898
8899 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8900 things change so we do, this code should be enhanced to
8901 support it. */
8902 if (TREE_CODE (lhs) == COMPONENT_REF
8903 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8904 || TREE_CODE (rhs) == BIT_AND_EXPR)
8905 && TREE_OPERAND (rhs, 0) == lhs
8906 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8907 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8908 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8909 {
8910 rtx label = gen_label_rtx ();
8911 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8912 do_jump (TREE_OPERAND (rhs, 1),
8913 value ? label : 0,
8914 value ? 0 : label);
8915 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
8916 MOVE_NONTEMPORAL (exp));
8917 do_pending_stack_adjust ();
8918 emit_label (label);
8919 return const0_rtx;
8920 }
8921
8922 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
8923 return const0_rtx;
8924 }
8925
8926 case RETURN_EXPR:
8927 if (!TREE_OPERAND (exp, 0))
8928 expand_null_return ();
8929 else
8930 expand_return (TREE_OPERAND (exp, 0));
8931 return const0_rtx;
8932
8933 case ADDR_EXPR:
8934 return expand_expr_addr_expr (exp, target, tmode, modifier);
8935
8936 case COMPLEX_EXPR:
8937 /* Get the rtx code of the operands. */
8938 op0 = expand_normal (TREE_OPERAND (exp, 0));
8939 op1 = expand_normal (TREE_OPERAND (exp, 1));
8940
8941 if (!target)
8942 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8943
8944 /* Move the real (op0) and imaginary (op1) parts to their location. */
8945 write_complex_part (target, op0, false);
8946 write_complex_part (target, op1, true);
8947
8948 return target;
8949
8950 case REALPART_EXPR:
8951 op0 = expand_normal (TREE_OPERAND (exp, 0));
8952 return read_complex_part (op0, false);
8953
8954 case IMAGPART_EXPR:
8955 op0 = expand_normal (TREE_OPERAND (exp, 0));
8956 return read_complex_part (op0, true);
8957
8958 case RESX_EXPR:
8959 expand_resx_expr (exp);
8960 return const0_rtx;
8961
8962 case TRY_CATCH_EXPR:
8963 case CATCH_EXPR:
8964 case EH_FILTER_EXPR:
8965 case TRY_FINALLY_EXPR:
8966 /* Lowered by tree-eh.c. */
8967 gcc_unreachable ();
8968
8969 case WITH_CLEANUP_EXPR:
8970 case CLEANUP_POINT_EXPR:
8971 case TARGET_EXPR:
8972 case CASE_LABEL_EXPR:
8973 case VA_ARG_EXPR:
8974 case BIND_EXPR:
8975 case INIT_EXPR:
8976 case CONJ_EXPR:
8977 case COMPOUND_EXPR:
8978 case PREINCREMENT_EXPR:
8979 case PREDECREMENT_EXPR:
8980 case POSTINCREMENT_EXPR:
8981 case POSTDECREMENT_EXPR:
8982 case LOOP_EXPR:
8983 case EXIT_EXPR:
8984 case TRUTH_ANDIF_EXPR:
8985 case TRUTH_ORIF_EXPR:
8986 /* Lowered by gimplify.c. */
8987 gcc_unreachable ();
8988
8989 case CHANGE_DYNAMIC_TYPE_EXPR:
8990 /* This is ignored at the RTL level. The tree level set
8991 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
8992 overkill for the RTL layer but is all that we can
8993 represent. */
8994 return const0_rtx;
8995
8996 case EXC_PTR_EXPR:
8997 return get_exception_pointer (cfun);
8998
8999 case FILTER_EXPR:
9000 return get_exception_filter (cfun);
9001
9002 case FDESC_EXPR:
9003 /* Function descriptors are not valid except for as
9004 initialization constants, and should not be expanded. */
9005 gcc_unreachable ();
9006
9007 case SWITCH_EXPR:
9008 expand_case (exp);
9009 return const0_rtx;
9010
9011 case LABEL_EXPR:
9012 expand_label (TREE_OPERAND (exp, 0));
9013 return const0_rtx;
9014
9015 case ASM_EXPR:
9016 expand_asm_expr (exp);
9017 return const0_rtx;
9018
9019 case WITH_SIZE_EXPR:
9020 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9021 have pulled out the size to use in whatever context it needed. */
9022 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9023 modifier, alt_rtl);
9024
9025 case REALIGN_LOAD_EXPR:
9026 {
9027 tree oprnd0 = TREE_OPERAND (exp, 0);
9028 tree oprnd1 = TREE_OPERAND (exp, 1);
9029 tree oprnd2 = TREE_OPERAND (exp, 2);
9030 rtx op2;
9031
9032 this_optab = optab_for_tree_code (code, type);
9033 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9034 op2 = expand_normal (oprnd2);
9035 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9036 target, unsignedp);
9037 gcc_assert (temp);
9038 return temp;
9039 }
9040
9041 case DOT_PROD_EXPR:
9042 {
9043 tree oprnd0 = TREE_OPERAND (exp, 0);
9044 tree oprnd1 = TREE_OPERAND (exp, 1);
9045 tree oprnd2 = TREE_OPERAND (exp, 2);
9046 rtx op2;
9047
9048 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9049 op2 = expand_normal (oprnd2);
9050 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9051 target, unsignedp);
9052 return target;
9053 }
9054
9055 case WIDEN_SUM_EXPR:
9056 {
9057 tree oprnd0 = TREE_OPERAND (exp, 0);
9058 tree oprnd1 = TREE_OPERAND (exp, 1);
9059
9060 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9061 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9062 target, unsignedp);
9063 return target;
9064 }
9065
9066 case REDUC_MAX_EXPR:
9067 case REDUC_MIN_EXPR:
9068 case REDUC_PLUS_EXPR:
9069 {
9070 op0 = expand_normal (TREE_OPERAND (exp, 0));
9071 this_optab = optab_for_tree_code (code, type);
9072 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9073 gcc_assert (temp);
9074 return temp;
9075 }
9076
9077 case VEC_EXTRACT_EVEN_EXPR:
9078 case VEC_EXTRACT_ODD_EXPR:
9079 {
9080 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9081 NULL_RTX, &op0, &op1, 0);
9082 this_optab = optab_for_tree_code (code, type);
9083 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9084 OPTAB_WIDEN);
9085 gcc_assert (temp);
9086 return temp;
9087 }
9088
9089 case VEC_INTERLEAVE_HIGH_EXPR:
9090 case VEC_INTERLEAVE_LOW_EXPR:
9091 {
9092 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9093 NULL_RTX, &op0, &op1, 0);
9094 this_optab = optab_for_tree_code (code, type);
9095 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9096 OPTAB_WIDEN);
9097 gcc_assert (temp);
9098 return temp;
9099 }
9100
9101 case VEC_LSHIFT_EXPR:
9102 case VEC_RSHIFT_EXPR:
9103 {
9104 target = expand_vec_shift_expr (exp, target);
9105 return target;
9106 }
9107
9108 case VEC_UNPACK_HI_EXPR:
9109 case VEC_UNPACK_LO_EXPR:
9110 {
9111 op0 = expand_normal (TREE_OPERAND (exp, 0));
9112 this_optab = optab_for_tree_code (code, type);
9113 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9114 target, unsignedp);
9115 gcc_assert (temp);
9116 return temp;
9117 }
9118
9119 case VEC_UNPACK_FLOAT_HI_EXPR:
9120 case VEC_UNPACK_FLOAT_LO_EXPR:
9121 {
9122 op0 = expand_normal (TREE_OPERAND (exp, 0));
9123 /* The signedness is determined from input operand. */
9124 this_optab = optab_for_tree_code (code,
9125 TREE_TYPE (TREE_OPERAND (exp, 0)));
9126 temp = expand_widen_pattern_expr
9127 (exp, op0, NULL_RTX, NULL_RTX,
9128 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9129
9130 gcc_assert (temp);
9131 return temp;
9132 }
9133
9134 case VEC_WIDEN_MULT_HI_EXPR:
9135 case VEC_WIDEN_MULT_LO_EXPR:
9136 {
9137 tree oprnd0 = TREE_OPERAND (exp, 0);
9138 tree oprnd1 = TREE_OPERAND (exp, 1);
9139
9140 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9141 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9142 target, unsignedp);
9143 gcc_assert (target);
9144 return target;
9145 }
9146
9147 case VEC_PACK_TRUNC_EXPR:
9148 case VEC_PACK_SAT_EXPR:
9149 case VEC_PACK_FIX_TRUNC_EXPR:
9150 {
9151 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9152 goto binop;
9153 }
9154
9155 default:
9156 return lang_hooks.expand_expr (exp, original_target, tmode,
9157 modifier, alt_rtl);
9158 }
9159
9160 /* Here to do an ordinary binary operator. */
9161 binop:
9162 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9163 subtarget, &op0, &op1, 0);
9164 binop2:
9165 this_optab = optab_for_tree_code (code, type);
9166 binop3:
9167 if (modifier == EXPAND_STACK_PARM)
9168 target = 0;
9169 temp = expand_binop (mode, this_optab, op0, op1, target,
9170 unsignedp, OPTAB_LIB_WIDEN);
9171 gcc_assert (temp);
9172 return REDUCE_BIT_FIELD (temp);
9173 }
9174 #undef REDUCE_BIT_FIELD
9175 \f
9176 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9177 signedness of TYPE), possibly returning the result in TARGET. */
9178 static rtx
9179 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9180 {
9181 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9182 if (target && GET_MODE (target) != GET_MODE (exp))
9183 target = 0;
9184 /* For constant values, reduce using build_int_cst_type. */
9185 if (GET_CODE (exp) == CONST_INT)
9186 {
9187 HOST_WIDE_INT value = INTVAL (exp);
9188 tree t = build_int_cst_type (type, value);
9189 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9190 }
9191 else if (TYPE_UNSIGNED (type))
9192 {
9193 rtx mask;
9194 if (prec < HOST_BITS_PER_WIDE_INT)
9195 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9196 GET_MODE (exp));
9197 else
9198 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9199 ((unsigned HOST_WIDE_INT) 1
9200 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9201 GET_MODE (exp));
9202 return expand_and (GET_MODE (exp), exp, mask, target);
9203 }
9204 else
9205 {
9206 tree count = build_int_cst (NULL_TREE,
9207 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9208 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9209 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9210 }
9211 }
9212 \f
9213 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9214 when applied to the address of EXP produces an address known to be
9215 aligned more than BIGGEST_ALIGNMENT. */
9216
9217 static int
9218 is_aligning_offset (tree offset, tree exp)
9219 {
9220 /* Strip off any conversions. */
9221 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9222 || TREE_CODE (offset) == NOP_EXPR
9223 || TREE_CODE (offset) == CONVERT_EXPR)
9224 offset = TREE_OPERAND (offset, 0);
9225
9226 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9227 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9228 if (TREE_CODE (offset) != BIT_AND_EXPR
9229 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9230 || compare_tree_int (TREE_OPERAND (offset, 1),
9231 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9232 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9233 return 0;
9234
9235 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9236 It must be NEGATE_EXPR. Then strip any more conversions. */
9237 offset = TREE_OPERAND (offset, 0);
9238 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9239 || TREE_CODE (offset) == NOP_EXPR
9240 || TREE_CODE (offset) == CONVERT_EXPR)
9241 offset = TREE_OPERAND (offset, 0);
9242
9243 if (TREE_CODE (offset) != NEGATE_EXPR)
9244 return 0;
9245
9246 offset = TREE_OPERAND (offset, 0);
9247 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9248 || TREE_CODE (offset) == NOP_EXPR
9249 || TREE_CODE (offset) == CONVERT_EXPR)
9250 offset = TREE_OPERAND (offset, 0);
9251
9252 /* This must now be the address of EXP. */
9253 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9254 }
9255 \f
9256 /* Return the tree node if an ARG corresponds to a string constant or zero
9257 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9258 in bytes within the string that ARG is accessing. The type of the
9259 offset will be `sizetype'. */
9260
9261 tree
9262 string_constant (tree arg, tree *ptr_offset)
9263 {
9264 tree array, offset, lower_bound;
9265 STRIP_NOPS (arg);
9266
9267 if (TREE_CODE (arg) == ADDR_EXPR)
9268 {
9269 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9270 {
9271 *ptr_offset = size_zero_node;
9272 return TREE_OPERAND (arg, 0);
9273 }
9274 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9275 {
9276 array = TREE_OPERAND (arg, 0);
9277 offset = size_zero_node;
9278 }
9279 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9280 {
9281 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9282 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9283 if (TREE_CODE (array) != STRING_CST
9284 && TREE_CODE (array) != VAR_DECL)
9285 return 0;
9286
9287 /* Check if the array has a nonzero lower bound. */
9288 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9289 if (!integer_zerop (lower_bound))
9290 {
9291 /* If the offset and base aren't both constants, return 0. */
9292 if (TREE_CODE (lower_bound) != INTEGER_CST)
9293 return 0;
9294 if (TREE_CODE (offset) != INTEGER_CST)
9295 return 0;
9296 /* Adjust offset by the lower bound. */
9297 offset = size_diffop (fold_convert (sizetype, offset),
9298 fold_convert (sizetype, lower_bound));
9299 }
9300 }
9301 else
9302 return 0;
9303 }
9304 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9305 {
9306 tree arg0 = TREE_OPERAND (arg, 0);
9307 tree arg1 = TREE_OPERAND (arg, 1);
9308
9309 STRIP_NOPS (arg0);
9310 STRIP_NOPS (arg1);
9311
9312 if (TREE_CODE (arg0) == ADDR_EXPR
9313 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9314 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9315 {
9316 array = TREE_OPERAND (arg0, 0);
9317 offset = arg1;
9318 }
9319 else if (TREE_CODE (arg1) == ADDR_EXPR
9320 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9321 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9322 {
9323 array = TREE_OPERAND (arg1, 0);
9324 offset = arg0;
9325 }
9326 else
9327 return 0;
9328 }
9329 else
9330 return 0;
9331
9332 if (TREE_CODE (array) == STRING_CST)
9333 {
9334 *ptr_offset = fold_convert (sizetype, offset);
9335 return array;
9336 }
9337 else if (TREE_CODE (array) == VAR_DECL)
9338 {
9339 int length;
9340
9341 /* Variables initialized to string literals can be handled too. */
9342 if (DECL_INITIAL (array) == NULL_TREE
9343 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9344 return 0;
9345
9346 /* If they are read-only, non-volatile and bind locally. */
9347 if (! TREE_READONLY (array)
9348 || TREE_SIDE_EFFECTS (array)
9349 || ! targetm.binds_local_p (array))
9350 return 0;
9351
9352 /* Avoid const char foo[4] = "abcde"; */
9353 if (DECL_SIZE_UNIT (array) == NULL_TREE
9354 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9355 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9356 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9357 return 0;
9358
9359 /* If variable is bigger than the string literal, OFFSET must be constant
9360 and inside of the bounds of the string literal. */
9361 offset = fold_convert (sizetype, offset);
9362 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9363 && (! host_integerp (offset, 1)
9364 || compare_tree_int (offset, length) >= 0))
9365 return 0;
9366
9367 *ptr_offset = offset;
9368 return DECL_INITIAL (array);
9369 }
9370
9371 return 0;
9372 }
9373 \f
9374 /* Generate code to calculate EXP using a store-flag instruction
9375 and return an rtx for the result. EXP is either a comparison
9376 or a TRUTH_NOT_EXPR whose operand is a comparison.
9377
9378 If TARGET is nonzero, store the result there if convenient.
9379
9380 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9381 cheap.
9382
9383 Return zero if there is no suitable set-flag instruction
9384 available on this machine.
9385
9386 Once expand_expr has been called on the arguments of the comparison,
9387 we are committed to doing the store flag, since it is not safe to
9388 re-evaluate the expression. We emit the store-flag insn by calling
9389 emit_store_flag, but only expand the arguments if we have a reason
9390 to believe that emit_store_flag will be successful. If we think that
9391 it will, but it isn't, we have to simulate the store-flag with a
9392 set/jump/set sequence. */
9393
9394 static rtx
9395 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9396 {
9397 enum rtx_code code;
9398 tree arg0, arg1, type;
9399 tree tem;
9400 enum machine_mode operand_mode;
9401 int invert = 0;
9402 int unsignedp;
9403 rtx op0, op1;
9404 enum insn_code icode;
9405 rtx subtarget = target;
9406 rtx result, label;
9407
9408 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9409 result at the end. We can't simply invert the test since it would
9410 have already been inverted if it were valid. This case occurs for
9411 some floating-point comparisons. */
9412
9413 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9414 invert = 1, exp = TREE_OPERAND (exp, 0);
9415
9416 arg0 = TREE_OPERAND (exp, 0);
9417 arg1 = TREE_OPERAND (exp, 1);
9418
9419 /* Don't crash if the comparison was erroneous. */
9420 if (arg0 == error_mark_node || arg1 == error_mark_node)
9421 return const0_rtx;
9422
9423 type = TREE_TYPE (arg0);
9424 operand_mode = TYPE_MODE (type);
9425 unsignedp = TYPE_UNSIGNED (type);
9426
9427 /* We won't bother with BLKmode store-flag operations because it would mean
9428 passing a lot of information to emit_store_flag. */
9429 if (operand_mode == BLKmode)
9430 return 0;
9431
9432 /* We won't bother with store-flag operations involving function pointers
9433 when function pointers must be canonicalized before comparisons. */
9434 #ifdef HAVE_canonicalize_funcptr_for_compare
9435 if (HAVE_canonicalize_funcptr_for_compare
9436 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9438 == FUNCTION_TYPE))
9439 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9440 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9441 == FUNCTION_TYPE))))
9442 return 0;
9443 #endif
9444
9445 STRIP_NOPS (arg0);
9446 STRIP_NOPS (arg1);
9447
9448 /* Get the rtx comparison code to use. We know that EXP is a comparison
9449 operation of some type. Some comparisons against 1 and -1 can be
9450 converted to comparisons with zero. Do so here so that the tests
9451 below will be aware that we have a comparison with zero. These
9452 tests will not catch constants in the first operand, but constants
9453 are rarely passed as the first operand. */
9454
9455 switch (TREE_CODE (exp))
9456 {
9457 case EQ_EXPR:
9458 code = EQ;
9459 break;
9460 case NE_EXPR:
9461 code = NE;
9462 break;
9463 case LT_EXPR:
9464 if (integer_onep (arg1))
9465 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9466 else
9467 code = unsignedp ? LTU : LT;
9468 break;
9469 case LE_EXPR:
9470 if (! unsignedp && integer_all_onesp (arg1))
9471 arg1 = integer_zero_node, code = LT;
9472 else
9473 code = unsignedp ? LEU : LE;
9474 break;
9475 case GT_EXPR:
9476 if (! unsignedp && integer_all_onesp (arg1))
9477 arg1 = integer_zero_node, code = GE;
9478 else
9479 code = unsignedp ? GTU : GT;
9480 break;
9481 case GE_EXPR:
9482 if (integer_onep (arg1))
9483 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9484 else
9485 code = unsignedp ? GEU : GE;
9486 break;
9487
9488 case UNORDERED_EXPR:
9489 code = UNORDERED;
9490 break;
9491 case ORDERED_EXPR:
9492 code = ORDERED;
9493 break;
9494 case UNLT_EXPR:
9495 code = UNLT;
9496 break;
9497 case UNLE_EXPR:
9498 code = UNLE;
9499 break;
9500 case UNGT_EXPR:
9501 code = UNGT;
9502 break;
9503 case UNGE_EXPR:
9504 code = UNGE;
9505 break;
9506 case UNEQ_EXPR:
9507 code = UNEQ;
9508 break;
9509 case LTGT_EXPR:
9510 code = LTGT;
9511 break;
9512
9513 default:
9514 gcc_unreachable ();
9515 }
9516
9517 /* Put a constant second. */
9518 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9519 {
9520 tem = arg0; arg0 = arg1; arg1 = tem;
9521 code = swap_condition (code);
9522 }
9523
9524 /* If this is an equality or inequality test of a single bit, we can
9525 do this by shifting the bit being tested to the low-order bit and
9526 masking the result with the constant 1. If the condition was EQ,
9527 we xor it with 1. This does not require an scc insn and is faster
9528 than an scc insn even if we have it.
9529
9530 The code to make this transformation was moved into fold_single_bit_test,
9531 so we just call into the folder and expand its result. */
9532
9533 if ((code == NE || code == EQ)
9534 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9535 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9536 {
9537 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9538 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9539 arg0, arg1, type),
9540 target, VOIDmode, EXPAND_NORMAL);
9541 }
9542
9543 /* Now see if we are likely to be able to do this. Return if not. */
9544 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9545 return 0;
9546
9547 icode = setcc_gen_code[(int) code];
9548
9549 if (icode == CODE_FOR_nothing)
9550 {
9551 enum machine_mode wmode;
9552
9553 for (wmode = operand_mode;
9554 icode == CODE_FOR_nothing && wmode != VOIDmode;
9555 wmode = GET_MODE_WIDER_MODE (wmode))
9556 icode = optab_handler (cstore_optab, wmode)->insn_code;
9557 }
9558
9559 if (icode == CODE_FOR_nothing
9560 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9561 {
9562 /* We can only do this if it is one of the special cases that
9563 can be handled without an scc insn. */
9564 if ((code == LT && integer_zerop (arg1))
9565 || (! only_cheap && code == GE && integer_zerop (arg1)))
9566 ;
9567 else if (! only_cheap && (code == NE || code == EQ)
9568 && TREE_CODE (type) != REAL_TYPE
9569 && ((optab_handler (abs_optab, operand_mode)->insn_code
9570 != CODE_FOR_nothing)
9571 || (optab_handler (ffs_optab, operand_mode)->insn_code
9572 != CODE_FOR_nothing)))
9573 ;
9574 else
9575 return 0;
9576 }
9577
9578 if (! get_subtarget (target)
9579 || GET_MODE (subtarget) != operand_mode)
9580 subtarget = 0;
9581
9582 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9583
9584 if (target == 0)
9585 target = gen_reg_rtx (mode);
9586
9587 result = emit_store_flag (target, code, op0, op1,
9588 operand_mode, unsignedp, 1);
9589
9590 if (result)
9591 {
9592 if (invert)
9593 result = expand_binop (mode, xor_optab, result, const1_rtx,
9594 result, 0, OPTAB_LIB_WIDEN);
9595 return result;
9596 }
9597
9598 /* If this failed, we have to do this with set/compare/jump/set code. */
9599 if (!REG_P (target)
9600 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9601 target = gen_reg_rtx (GET_MODE (target));
9602
9603 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9604 label = gen_label_rtx ();
9605 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9606 NULL_RTX, label);
9607
9608 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9609 emit_label (label);
9610
9611 return target;
9612 }
9613 \f
9614
9615 /* Stubs in case we haven't got a casesi insn. */
9616 #ifndef HAVE_casesi
9617 # define HAVE_casesi 0
9618 # define gen_casesi(a, b, c, d, e) (0)
9619 # define CODE_FOR_casesi CODE_FOR_nothing
9620 #endif
9621
9622 /* If the machine does not have a case insn that compares the bounds,
9623 this means extra overhead for dispatch tables, which raises the
9624 threshold for using them. */
9625 #ifndef CASE_VALUES_THRESHOLD
9626 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9627 #endif /* CASE_VALUES_THRESHOLD */
9628
9629 unsigned int
9630 case_values_threshold (void)
9631 {
9632 return CASE_VALUES_THRESHOLD;
9633 }
9634
9635 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9636 0 otherwise (i.e. if there is no casesi instruction). */
9637 int
9638 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9639 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9640 {
9641 enum machine_mode index_mode = SImode;
9642 int index_bits = GET_MODE_BITSIZE (index_mode);
9643 rtx op1, op2, index;
9644 enum machine_mode op_mode;
9645
9646 if (! HAVE_casesi)
9647 return 0;
9648
9649 /* Convert the index to SImode. */
9650 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9651 {
9652 enum machine_mode omode = TYPE_MODE (index_type);
9653 rtx rangertx = expand_normal (range);
9654
9655 /* We must handle the endpoints in the original mode. */
9656 index_expr = build2 (MINUS_EXPR, index_type,
9657 index_expr, minval);
9658 minval = integer_zero_node;
9659 index = expand_normal (index_expr);
9660 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9661 omode, 1, default_label);
9662 /* Now we can safely truncate. */
9663 index = convert_to_mode (index_mode, index, 0);
9664 }
9665 else
9666 {
9667 if (TYPE_MODE (index_type) != index_mode)
9668 {
9669 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9670 index_expr = fold_convert (index_type, index_expr);
9671 }
9672
9673 index = expand_normal (index_expr);
9674 }
9675
9676 do_pending_stack_adjust ();
9677
9678 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9679 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9680 (index, op_mode))
9681 index = copy_to_mode_reg (op_mode, index);
9682
9683 op1 = expand_normal (minval);
9684
9685 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9686 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9687 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9688 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9689 (op1, op_mode))
9690 op1 = copy_to_mode_reg (op_mode, op1);
9691
9692 op2 = expand_normal (range);
9693
9694 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9695 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9696 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9697 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9698 (op2, op_mode))
9699 op2 = copy_to_mode_reg (op_mode, op2);
9700
9701 emit_jump_insn (gen_casesi (index, op1, op2,
9702 table_label, default_label));
9703 return 1;
9704 }
9705
9706 /* Attempt to generate a tablejump instruction; same concept. */
9707 #ifndef HAVE_tablejump
9708 #define HAVE_tablejump 0
9709 #define gen_tablejump(x, y) (0)
9710 #endif
9711
9712 /* Subroutine of the next function.
9713
9714 INDEX is the value being switched on, with the lowest value
9715 in the table already subtracted.
9716 MODE is its expected mode (needed if INDEX is constant).
9717 RANGE is the length of the jump table.
9718 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9719
9720 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9721 index value is out of range. */
9722
9723 static void
9724 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9725 rtx default_label)
9726 {
9727 rtx temp, vector;
9728
9729 if (INTVAL (range) > cfun->max_jumptable_ents)
9730 cfun->max_jumptable_ents = INTVAL (range);
9731
9732 /* Do an unsigned comparison (in the proper mode) between the index
9733 expression and the value which represents the length of the range.
9734 Since we just finished subtracting the lower bound of the range
9735 from the index expression, this comparison allows us to simultaneously
9736 check that the original index expression value is both greater than
9737 or equal to the minimum value of the range and less than or equal to
9738 the maximum value of the range. */
9739
9740 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9741 default_label);
9742
9743 /* If index is in range, it must fit in Pmode.
9744 Convert to Pmode so we can index with it. */
9745 if (mode != Pmode)
9746 index = convert_to_mode (Pmode, index, 1);
9747
9748 /* Don't let a MEM slip through, because then INDEX that comes
9749 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9750 and break_out_memory_refs will go to work on it and mess it up. */
9751 #ifdef PIC_CASE_VECTOR_ADDRESS
9752 if (flag_pic && !REG_P (index))
9753 index = copy_to_mode_reg (Pmode, index);
9754 #endif
9755
9756 /* If flag_force_addr were to affect this address
9757 it could interfere with the tricky assumptions made
9758 about addresses that contain label-refs,
9759 which may be valid only very near the tablejump itself. */
9760 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9761 GET_MODE_SIZE, because this indicates how large insns are. The other
9762 uses should all be Pmode, because they are addresses. This code
9763 could fail if addresses and insns are not the same size. */
9764 index = gen_rtx_PLUS (Pmode,
9765 gen_rtx_MULT (Pmode, index,
9766 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9767 gen_rtx_LABEL_REF (Pmode, table_label));
9768 #ifdef PIC_CASE_VECTOR_ADDRESS
9769 if (flag_pic)
9770 index = PIC_CASE_VECTOR_ADDRESS (index);
9771 else
9772 #endif
9773 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9774 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9775 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9776 convert_move (temp, vector, 0);
9777
9778 emit_jump_insn (gen_tablejump (temp, table_label));
9779
9780 /* If we are generating PIC code or if the table is PC-relative, the
9781 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9782 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9783 emit_barrier ();
9784 }
9785
9786 int
9787 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9788 rtx table_label, rtx default_label)
9789 {
9790 rtx index;
9791
9792 if (! HAVE_tablejump)
9793 return 0;
9794
9795 index_expr = fold_build2 (MINUS_EXPR, index_type,
9796 fold_convert (index_type, index_expr),
9797 fold_convert (index_type, minval));
9798 index = expand_normal (index_expr);
9799 do_pending_stack_adjust ();
9800
9801 do_tablejump (index, TYPE_MODE (index_type),
9802 convert_modes (TYPE_MODE (index_type),
9803 TYPE_MODE (TREE_TYPE (range)),
9804 expand_normal (range),
9805 TYPE_UNSIGNED (TREE_TYPE (range))),
9806 table_label, default_label);
9807 return 1;
9808 }
9809
9810 /* Nonzero if the mode is a valid vector mode for this architecture.
9811 This returns nonzero even if there is no hardware support for the
9812 vector mode, but we can emulate with narrower modes. */
9813
9814 int
9815 vector_mode_valid_p (enum machine_mode mode)
9816 {
9817 enum mode_class class = GET_MODE_CLASS (mode);
9818 enum machine_mode innermode;
9819
9820 /* Doh! What's going on? */
9821 if (class != MODE_VECTOR_INT
9822 && class != MODE_VECTOR_FLOAT)
9823 return 0;
9824
9825 /* Hardware support. Woo hoo! */
9826 if (targetm.vector_mode_supported_p (mode))
9827 return 1;
9828
9829 innermode = GET_MODE_INNER (mode);
9830
9831 /* We should probably return 1 if requesting V4DI and we have no DI,
9832 but we have V2DI, but this is probably very unlikely. */
9833
9834 /* If we have support for the inner mode, we can safely emulate it.
9835 We may not have V2DI, but me can emulate with a pair of DIs. */
9836 return targetm.scalar_mode_supported_p (innermode);
9837 }
9838
9839 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9840 static rtx
9841 const_vector_from_tree (tree exp)
9842 {
9843 rtvec v;
9844 int units, i;
9845 tree link, elt;
9846 enum machine_mode inner, mode;
9847
9848 mode = TYPE_MODE (TREE_TYPE (exp));
9849
9850 if (initializer_zerop (exp))
9851 return CONST0_RTX (mode);
9852
9853 units = GET_MODE_NUNITS (mode);
9854 inner = GET_MODE_INNER (mode);
9855
9856 v = rtvec_alloc (units);
9857
9858 link = TREE_VECTOR_CST_ELTS (exp);
9859 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9860 {
9861 elt = TREE_VALUE (link);
9862
9863 if (TREE_CODE (elt) == REAL_CST)
9864 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9865 inner);
9866 else
9867 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9868 TREE_INT_CST_HIGH (elt),
9869 inner);
9870 }
9871
9872 /* Initialize remaining elements to 0. */
9873 for (; i < units; ++i)
9874 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9875
9876 return gen_rtx_CONST_VECTOR (mode, v);
9877 }
9878 #include "gt-expr.h"