1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
62
63 #ifdef PUSH_ROUNDING
64
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
70
71 #endif
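/* For example, on a target where the stack grows downward but
   ARGS_GROW_DOWNWARD is not defined, exactly one of the two macros is
   defined, so PUSH_ARGS_REVERSED ends up defined and arguments are
   processed from last to first.  */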
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the store to
108 be performed. */
109
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
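/* As a rough illustration, with made-up numbers: for SIZE == 16 and a
   word-aligned block on a 32-bit target, move_by_pieces_ninsns would
   count four SImode moves, so MOVE_BY_PIECES_P accepts the inline copy
   only if MOVE_RATIO is greater than 4.  MOVE_RATIO, CLEAR_RATIO and
   the *_MAX_PIECES limits are all target-defined.  */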
198
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201
202 /* This array records the insn_code of insns to perform block sets. */
203 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204
205 /* These arrays record the insn_code of three different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239 #endif
240 \f
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and to initialize the block move optab. */
243
244 void
245 init_expr_once (void)
246 {
247 rtx insn, pat;
248 enum machine_mode mode;
249 int num_clobbers;
250 rtx mem, mem1;
251 rtx reg;
252
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
262
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
271
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
274 PUT_MODE (mem1, mode);
275 PUT_MODE (reg, mode);
276
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
287
288 REGNO (reg) = regno;
289
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
294
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
309 }
310 }
311
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
316 {
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 {
321 enum insn_code ic;
322
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
326
327 PUT_MODE (mem, srcmode);
328
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
331 }
332 }
333 }
334
335 /* This is run at the start of compiling a function. */
336
337 void
338 init_expr (void)
339 {
340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 }
342 \f
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
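/* A minimal usage sketch (the pseudo SRC and the two modes are assumed
   purely for illustration, not taken from a real caller):

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   With UNSIGNEDP nonzero the widening is a zero extension; with
   UNSIGNEDP zero it would be a sign extension.  */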
347
348 void
349 convert_move (rtx to, rtx from, int unsignedp)
350 {
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361
362
363 gcc_assert (to_real == from_real);
364
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
409 if (to_real)
410 {
411 rtx value, insns;
412 convert_optab tab;
413
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
418
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
426
427 /* Try converting directly if the insn is supported. */
428
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
431 {
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
435 }
436
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
439
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
442
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
453 }
454
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 {
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
465
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
471 }
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 {
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
480
481 if (to_mode == full_mode)
482 {
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
486 }
487
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
491
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
495 }
496
497 /* Now both modes are integers. */
498
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 {
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
514 {
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
523 }
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
528 {
529 if (REG_P (to))
530 {
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 }
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
539 }
540
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
543
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
546
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
549
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
555
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
560
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
565 {
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
570 {
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
575 }
576 else
577 #endif
578 {
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
584 }
585 }
586
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 {
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
592
593 gcc_assert (subword);
594
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
597 }
598
599 insns = get_insns ();
600 end_sequence ();
601
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
605 }
606
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610 {
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
620 }
621
622 /* Now follow all the conversions between integers
623 no more than a word long. */
624
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
629 {
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
642 }
643
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 {
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
650 {
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
653 }
654 else
655 {
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
659
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
670 {
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
674 }
675
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
689 }
690 }
691
692 /* Support special truncate insns for certain modes. */
693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
694 {
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
697 return;
698 }
699
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
703
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 {
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
712 }
713
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
716 }
717
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
724
725 rtx
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 {
728 return convert_modes (mode, VOIDmode, x, unsignedp);
729 }
730
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
735
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
738
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
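/* An illustrative sketch with assumed modes (not from a real caller):
   narrowing a DImode value X down to SImode could be written

     rtx narrow = convert_modes (SImode, DImode, x, 0);

   or, when X already carries its mode,

     rtx narrow = convert_to_mode (SImode, x, 0);

   Either call may reuse (a lowpart of) X in place or return a fresh
   pseudo holding the converted value.  */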
740
741 rtx
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 {
744 rtx temp;
745
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
748
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
753
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
756
757 if (mode == oldmode)
758 return x;
759
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong thing if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
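  /* Concretely, assuming a 32-bit HOST_WIDE_INT for the sake of the
     example: converting (const_int -1) to an unsigned 64-bit integer
     mode should yield the constant 0x00000000ffffffff, whereas a plain
     gen_lowpart would hand back all ones.  */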
765
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769 {
770 HOST_WIDE_INT val = INTVAL (x);
771
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 {
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 }
780
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 }
783
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
788
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
802 {
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 {
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
811
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
818
819 return gen_int_mode (val, mode);
820 }
821
822 return gen_lowpart (mode, x);
823 }
824
825 /* Converting from an integer constant into MODE is always equivalent to a
826 subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 {
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
831 }
832
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
836 }
837 \f
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
842
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
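/* For instance, on a hypothetical host with a 64-bit HOST_WIDE_INT and
   MOVE_MAX_PIECES of 8, this works out to MIN (8, 16) = 8; with a
   32-bit HOST_WIDE_INT it would be MIN (8, 8) = 8, and only a larger
   MOVE_MAX_PIECES would be clipped by the 2 * sizeof (HOST_WIDE_INT)
   term.  */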
844
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
848
849 int
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
852 {
853 return MOVE_BY_PIECES_P (len, align);
854 }
855
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
858
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
861
862 ALIGN is the maximum stack alignment we can assume.
863
864 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
866 stpcpy. */
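/* Put differently, the ENDP convention mirrors the C library: ENDP == 0
   returns TO itself (memcpy style), ENDP == 1 returns memory just past
   the copied bytes (mempcpy style), and ENDP == 2 returns memory at the
   last byte written (stpcpy style).  */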
867
868 rtx
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
871 {
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
877
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
883 {
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 }
892 else
893 {
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
902 }
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 {
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 {
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
932 }
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 {
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
938 }
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 {
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 {
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
955 }
956
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
961 {
962 enum machine_mode xmode;
963
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
970
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 }
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1)
978 {
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
983
984 if (mode == VOIDmode)
985 break;
986
987 icode = mov_optab->handlers[(int) mode].insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990
991 max_size = GET_MODE_SIZE (mode);
992 }
993
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
996
997 if (endp)
998 {
999 rtx to1;
1000
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1003 {
1004 if (endp == 2)
1005 {
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1011 }
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1014 }
1015 else
1016 {
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1020 }
1021 return to1;
1022 }
1023 else
1024 return data.to;
1025 }
1026
1027 /* Return the number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is the maximum alignment we can assume. */
1029
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1033 {
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1036
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1041 {
1042 enum machine_mode tmode, xmode;
1043
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1050
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 }
1053
1054 while (max_size > 1)
1055 {
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1058
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1063
1064 if (mode == VOIDmode)
1065 break;
1066
1067 icode = mov_optab->handlers[(int) mode].insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070
1071 max_size = GET_MODE_SIZE (mode);
1072 }
1073
1074 gcc_assert (!l);
1075 return n_insns;
1076 }
1077
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1081
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1085 {
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1088
1089 while (data->len >= size)
1090 {
1091 if (data->reverse)
1092 data->offset -= size;
1093
1094 if (data->to)
1095 {
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1101 }
1102
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1108
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1115
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1119 {
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1125 }
1126
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131
1132 if (! data->reverse)
1133 data->offset += size;
1134
1135 data->len -= size;
1136 }
1137 }
1138 \f
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1142
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1147
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
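/* A minimal call sketch with an assumed constant length LEN; X and Y
   are BLKmode MEMs prepared by the caller:

     emit_block_move (x, y, GEN_INT (len), BLOCK_OP_NORMAL);

   BLOCK_OP_NO_LIBCALL forbids falling back to memcpy, and
   BLOCK_OP_CALL_PARM permits the libcall only when it cannot clobber
   outgoing argument slots already placed on the stack.  */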
1150
1151 rtx
1152 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1153 {
1154 bool may_use_call;
1155 rtx retval = 0;
1156 unsigned int align;
1157
1158 switch (method)
1159 {
1160 case BLOCK_OP_NORMAL:
1161 case BLOCK_OP_TAILCALL:
1162 may_use_call = true;
1163 break;
1164
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1167
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1170 NO_DEFER_POP;
1171 break;
1172
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1175 break;
1176
1177 default:
1178 gcc_unreachable ();
1179 }
1180
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1185 gcc_assert (size);
1186
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1191
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1195 {
1196 if (INTVAL (size) == 0)
1197 return 0;
1198
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1203 }
1204
1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1206 move_by_pieces (x, y, INTVAL (size), align, 0);
1207 else if (emit_block_move_via_movmem (x, y, size, align))
1208 ;
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1214
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1217
1218 return retval;
1219 }
1220
1221 /* A subroutine of emit_block_move. Returns true if calling the
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1224
1225 static bool
1226 block_move_libcall_safe_for_call_parm (void)
1227 {
1228 /* If arguments are pushed on the stack, then they're safe. */
1229 if (PUSH_ARGS)
1230 return true;
1231
1232 /* If registers go on the stack anyway, any argument is sure to clobber
1233 an outgoing argument. */
1234 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 {
1236 tree fn = emit_block_move_libcall_fn (false);
1237 (void) fn;
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1239 return false;
1240 }
1241 #endif
1242
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1245 {
1246 CUMULATIVE_ARGS args_so_far;
1247 tree fn, arg;
1248
1249 fn = emit_block_move_libcall_fn (false);
1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1251
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 {
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
1258 return false;
1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1260 return false;
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1262 }
1263 }
1264 return true;
1265 }
1266
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1269
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1272 {
1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1276
1277 /* Since this is a move insn, we don't care about volatility. */
1278 volatile_ok = 1;
1279
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1283
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1286 {
1287 enum insn_code code = movmem_optab[(int) mode];
1288 insn_operand_predicate_fn pred;
1289
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1305 {
1306 rtx op2;
1307 rtx last = get_last_insn ();
1308 rtx pat;
1309
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1314
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1319
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 if (pat)
1322 {
1323 emit_insn (pat);
1324 volatile_ok = save_volatile_ok;
1325 return true;
1326 }
1327 else
1328 delete_insns_since (last);
1329 }
1330 }
1331
1332 volatile_ok = save_volatile_ok;
1333 return false;
1334 }
1335
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1338
1339 static rtx
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1341 {
1342 rtx dst_addr, src_addr;
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1345 rtx retval;
1346
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
1350
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1353
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
1356
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
1359
1360 size_mode = TYPE_MODE (sizetype);
1361
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1364
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1370
1371 size_tree = make_tree (sizetype, size);
1372
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1377
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383
1384 retval = expand_normal (call_expr);
1385
1386 return retval;
1387 }
1388
1389 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
1392
1393 static GTY(()) tree block_move_fn;
1394
1395 void
1396 init_block_move_fn (const char *asmspec)
1397 {
1398 if (!block_move_fn)
1399 {
1400 tree args, fn;
1401
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1406
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414
1415 block_move_fn = fn;
1416 }
1417
1418 if (asmspec)
1419 set_user_assembler_name (block_move_fn, asmspec);
1420 }
1421
1422 static tree
1423 emit_block_move_libcall_fn (int for_call)
1424 {
1425 static bool emitted_extern;
1426
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1429
1430 if (for_call && !emitted_extern)
1431 {
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1434 assemble_external (block_move_fn);
1435 }
1436
1437 return block_move_fn;
1438 }
1439
1440 /* A subroutine of emit_block_move. Copy the data via an explicit
1441 loop. This is used only when libcalls are forbidden. */
1442 /* ??? It'd be nice to copy in hunks larger than QImode. */
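/* Roughly, the RTL emitted below corresponds to this C sketch, where
   ITER, X and Y stand for the generated pseudo and QImode references
   (the comparison is unsigned):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size) goto top;
 */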
1443
1444 static void
1445 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1446 unsigned int align ATTRIBUTE_UNUSED)
1447 {
1448 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1449 enum machine_mode iter_mode;
1450
1451 iter_mode = GET_MODE (size);
1452 if (iter_mode == VOIDmode)
1453 iter_mode = word_mode;
1454
1455 top_label = gen_label_rtx ();
1456 cmp_label = gen_label_rtx ();
1457 iter = gen_reg_rtx (iter_mode);
1458
1459 emit_move_insn (iter, const0_rtx);
1460
1461 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1462 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1463 do_pending_stack_adjust ();
1464
1465 emit_jump (cmp_label);
1466 emit_label (top_label);
1467
1468 tmp = convert_modes (Pmode, iter_mode, iter, true);
1469 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1470 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1471 x = change_address (x, QImode, x_addr);
1472 y = change_address (y, QImode, y_addr);
1473
1474 emit_move_insn (x, y);
1475
1476 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1477 true, OPTAB_LIB_WIDEN);
1478 if (tmp != iter)
1479 emit_move_insn (iter, tmp);
1480
1481 emit_label (cmp_label);
1482
1483 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1484 true, top_label);
1485 }
1486 \f
1487 /* Copy all or part of a value X into registers starting at REGNO.
1488 The number of registers to be filled is NREGS. */
1489
1490 void
1491 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1492 {
1493 int i;
1494 #ifdef HAVE_load_multiple
1495 rtx pat;
1496 rtx last;
1497 #endif
1498
1499 if (nregs == 0)
1500 return;
1501
1502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1503 x = validize_mem (force_const_mem (mode, x));
1504
1505 /* See if the machine can do this with a load multiple insn. */
1506 #ifdef HAVE_load_multiple
1507 if (HAVE_load_multiple)
1508 {
1509 last = get_last_insn ();
1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1511 GEN_INT (nregs));
1512 if (pat)
1513 {
1514 emit_insn (pat);
1515 return;
1516 }
1517 else
1518 delete_insns_since (last);
1519 }
1520 #endif
1521
1522 for (i = 0; i < nregs; i++)
1523 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1524 operand_subword_force (x, i, mode));
1525 }
1526
1527 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1528 The number of registers to be filled is NREGS. */
1529
1530 void
1531 move_block_from_reg (int regno, rtx x, int nregs)
1532 {
1533 int i;
1534
1535 if (nregs == 0)
1536 return;
1537
1538 /* See if the machine can do this with a store multiple insn. */
1539 #ifdef HAVE_store_multiple
1540 if (HAVE_store_multiple)
1541 {
1542 rtx last = get_last_insn ();
1543 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1544 GEN_INT (nregs));
1545 if (pat)
1546 {
1547 emit_insn (pat);
1548 return;
1549 }
1550 else
1551 delete_insns_since (last);
1552 }
1553 #endif
1554
1555 for (i = 0; i < nregs; i++)
1556 {
1557 rtx tem = operand_subword (x, i, 1, BLKmode);
1558
1559 gcc_assert (tem);
1560
1561 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1562 }
1563 }
1564
1565 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1566 ORIG, where ORIG is a non-consecutive group of registers represented by
1567 a PARALLEL. The clone is identical to the original except in that the
1568 original set of registers is replaced by a new set of pseudo registers.
1569 The new set has the same modes as the original set. */
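/* For reference, such a group is a PARALLEL of (EXPR_LIST reg offset)
   pairs.  A hypothetical two-register DImode group starting at byte 0
   might look like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   and gen_group_rtx returns the same shape with fresh pseudos in place
   of registers 100 and 101.  */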
1570
1571 rtx
1572 gen_group_rtx (rtx orig)
1573 {
1574 int i, length;
1575 rtx *tmps;
1576
1577 gcc_assert (GET_CODE (orig) == PARALLEL);
1578
1579 length = XVECLEN (orig, 0);
1580 tmps = alloca (sizeof (rtx) * length);
1581
1582 /* Skip a NULL entry in first slot. */
1583 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584
1585 if (i)
1586 tmps[0] = 0;
1587
1588 for (; i < length; i++)
1589 {
1590 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592
1593 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1594 }
1595
1596 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1597 }
1598
1599 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1600 except that values are placed in TMPS[i], and must later be moved
1601 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1602
1603 static void
1604 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605 {
1606 rtx src;
1607 int start, i;
1608 enum machine_mode m = GET_MODE (orig_src);
1609
1610 gcc_assert (GET_CODE (dst) == PARALLEL);
1611
1612 if (m != VOIDmode
1613 && !SCALAR_INT_MODE_P (m)
1614 && !MEM_P (orig_src)
1615 && GET_CODE (orig_src) != CONCAT)
1616 {
1617 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618 if (imode == BLKmode)
1619 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620 else
1621 src = gen_reg_rtx (imode);
1622 if (imode != BLKmode)
1623 src = gen_lowpart (GET_MODE (orig_src), src);
1624 emit_move_insn (src, orig_src);
1625 /* ...and back again. */
1626 if (imode != BLKmode)
1627 src = gen_lowpart (imode, src);
1628 emit_group_load_1 (tmps, dst, src, type, ssize);
1629 return;
1630 }
1631
1632 /* Check for a NULL entry, used to indicate that the parameter goes
1633 both on the stack and in registers. */
1634 if (XEXP (XVECEXP (dst, 0, 0), 0))
1635 start = 0;
1636 else
1637 start = 1;
1638
1639 /* Process the pieces. */
1640 for (i = start; i < XVECLEN (dst, 0); i++)
1641 {
1642 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1643 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644 unsigned int bytelen = GET_MODE_SIZE (mode);
1645 int shift = 0;
1646
1647 /* Handle trailing fragments that run over the size of the struct. */
1648 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 {
1650 /* Arrange to shift the fragment to where it belongs.
1651 extract_bit_field loads to the lsb of the reg. */
1652 if (
1653 #ifdef BLOCK_REG_PADDING
1654 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655 == (BYTES_BIG_ENDIAN ? upward : downward)
1656 #else
1657 BYTES_BIG_ENDIAN
1658 #endif
1659 )
1660 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1661 bytelen = ssize - bytepos;
1662 gcc_assert (bytelen > 0);
1663 }
1664
1665 /* If we won't be loading directly from memory, protect the real source
1666 from strange tricks we might play; but make sure that the source can
1667 be loaded directly into the destination. */
1668 src = orig_src;
1669 if (!MEM_P (orig_src)
1670 && (!CONSTANT_P (orig_src)
1671 || (GET_MODE (orig_src) != mode
1672 && GET_MODE (orig_src) != VOIDmode)))
1673 {
1674 if (GET_MODE (orig_src) == VOIDmode)
1675 src = gen_reg_rtx (mode);
1676 else
1677 src = gen_reg_rtx (GET_MODE (orig_src));
1678
1679 emit_move_insn (src, orig_src);
1680 }
1681
1682 /* Optimize the access just a bit. */
1683 if (MEM_P (src)
1684 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1686 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1687 && bytelen == GET_MODE_SIZE (mode))
1688 {
1689 tmps[i] = gen_reg_rtx (mode);
1690 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 }
1692 else if (COMPLEX_MODE_P (mode)
1693 && GET_MODE (src) == mode
1694 && bytelen == GET_MODE_SIZE (mode))
1695 /* Let emit_move_complex do the bulk of the work. */
1696 tmps[i] = src;
1697 else if (GET_CODE (src) == CONCAT)
1698 {
1699 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701
1702 if ((bytepos == 0 && bytelen == slen0)
1703 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 {
1705 /* The following assumes that the concatenated objects all
1706 have the same size. In this case, a simple calculation
1707 can be used to determine the object and the bit field
1708 to be extracted. */
1709 tmps[i] = XEXP (src, bytepos / slen0);
1710 if (! CONSTANT_P (tmps[i])
1711 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1712 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1713 (bytepos % slen0) * BITS_PER_UNIT,
1714 1, NULL_RTX, mode, mode);
1715 }
1716 else
1717 {
1718 rtx mem;
1719
1720 gcc_assert (!bytepos);
1721 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1722 emit_move_insn (mem, src);
1723 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724 0, 1, NULL_RTX, mode, mode);
1725 }
1726 }
1727 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1728 SIMD register, which is currently broken. Until we get GCC
1729 to emit proper RTL for these cases, let's dump to memory. */
1730 else if (VECTOR_MODE_P (GET_MODE (dst))
1731 && REG_P (src))
1732 {
1733 int slen = GET_MODE_SIZE (GET_MODE (src));
1734 rtx mem;
1735
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 }
1740 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741 && XVECLEN (dst, 0) > 1)
1742 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1743 else if (CONSTANT_P (src)
1744 || (REG_P (src) && GET_MODE (src) == mode))
1745 tmps[i] = src;
1746 else
1747 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1749 mode, mode);
1750
1751 if (shift)
1752 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1753 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1754 }
1755 }
1756
1757 /* Emit code to move a block SRC of type TYPE to a block DST,
1758 where DST is non-consecutive registers represented by a PARALLEL.
1759 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1760 if not known. */
1761
1762 void
1763 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764 {
1765 rtx *tmps;
1766 int i;
1767
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
1770
1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1773 {
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775 if (d == NULL)
1776 continue;
1777 emit_move_insn (d, tmps[i]);
1778 }
1779 }
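
/* Illustrative sketch (hypothetical values, not part of this file): a DST
   PARALLEL describing a 16-byte block split across two 8-byte registers,
   where each EXPR_LIST pairs a register with its byte offset into the
   block, might look roughly like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   so that emit_group_load (dst, src, type, 16) extracts bytes 0-7 and
   8-15 of SRC and copies them into (reg:DI 3) and (reg:DI 4).  */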
1780
1781 /* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1784
1785 rtx
1786 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787 {
1788 rtvec vec;
1789 int i;
1790
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1797 {
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1800
1801 if (d)
1802 {
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805 }
1806 RTVEC_ELT (vec, i) = e;
1807 }
1808
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1810 }
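
/* Usage sketch (hypothetical, not from this file): the two-step variant
   lets a caller compute the values early and commit to the hard registers
   only just before they are needed, e.g.

     rtx tmp = emit_group_load_into_temps (parallel, src, type, ssize);
     ... emit other setup that must not clobber the hard registers ...
     emit_group_move (parallel, tmp);

   where emit_group_move is defined below.  */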
1811
1812 /* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1814
1815 void
1816 emit_group_move (rtx dst, rtx src)
1817 {
1818 int i;
1819
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1823
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1828 }
1829
1830 /* Move a group of registers represented by a PARALLEL into pseudos. */
1831
1832 rtx
1833 emit_group_move_into_temps (rtx src)
1834 {
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836 int i;
1837
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1839 {
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1842
1843 if (d)
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1846 }
1847
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1849 }
1850
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1854 known. */
1855
1856 void
1857 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1858 {
1859 rtx *tmps, dst;
1860 int start, finish, i;
1861 enum machine_mode m = GET_MODE (orig_dst);
1862
1863 gcc_assert (GET_CODE (src) == PARALLEL);
1864
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1867 {
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871 else
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1877 return;
1878 }
1879
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1883 start = 0;
1884 else
1885 start = 1;
1886 finish = XVECLEN (src, 0);
1887
1888 tmps = alloca (sizeof (rtx) * finish);
1889
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i = start; i < finish; i++)
1892 {
1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895 {
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1898 }
1899 else
1900 tmps[i] = reg;
1901 }
1902
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1905 dst = orig_dst;
1906 if (GET_CODE (dst) == PARALLEL)
1907 {
1908 rtx temp;
1909
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1914 return;
1915
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917 it. Allocate a temporary, and split this into a store/load to/from
1918 the temporary. */
1919
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
1923 return;
1924 }
1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1926 {
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
1929 HOST_WIDE_INT bytepos;
1930 bool done = false;
1931 rtx temp;
1932
1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934 dst = gen_reg_rtx (outer);
1935
1936 /* Make life a bit easier for combine. */
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1940 if (start < finish)
1941 {
1942 inner = GET_MODE (tmps[start]);
1943 bytepos = subreg_lowpart_offset (inner, outer);
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945 {
1946 temp = simplify_gen_subreg (outer, tmps[start],
1947 inner, 0);
1948 if (temp)
1949 {
1950 emit_move_insn (dst, temp);
1951 done = true;
1952 start++;
1953 }
1954 }
1955 }
1956
1957 /* If the first element wasn't the low part, try the last. */
1958 if (!done
1959 && start < finish - 1)
1960 {
1961 inner = GET_MODE (tmps[finish - 1]);
1962 bytepos = subreg_lowpart_offset (inner, outer);
1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1964 {
1965 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1966 inner, 0);
1967 if (temp)
1968 {
1969 emit_move_insn (dst, temp);
1970 done = true;
1971 finish--;
1972 }
1973 }
1974 }
1975
1976 /* Otherwise, simply initialize the result to zero. */
1977 if (!done)
1978 emit_move_insn (dst, CONST0_RTX (outer));
1979 }
1980
1981 /* Process the pieces. */
1982 for (i = start; i < finish; i++)
1983 {
1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1985 enum machine_mode mode = GET_MODE (tmps[i]);
1986 unsigned int bytelen = GET_MODE_SIZE (mode);
1987 rtx dest = dst;
1988
1989 /* Handle trailing fragments that run over the size of the struct. */
1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1991 {
1992 /* store_bit_field always takes its value from the lsb.
1993 Move the fragment to the lsb if it's not already there. */
1994 if (
1995 #ifdef BLOCK_REG_PADDING
1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997 == (BYTES_BIG_ENDIAN ? upward : downward)
1998 #else
1999 BYTES_BIG_ENDIAN
2000 #endif
2001 )
2002 {
2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2005 build_int_cst (NULL_TREE, shift),
2006 tmps[i], 0);
2007 }
2008 bytelen = ssize - bytepos;
2009 }
2010
2011 if (GET_CODE (dst) == CONCAT)
2012 {
2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
2020 else
2021 {
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023 dest = assign_stack_temp (GET_MODE (dest),
2024 GET_MODE_SIZE (GET_MODE (dest)), 0);
2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2026 tmps[i]);
2027 dst = dest;
2028 break;
2029 }
2030 }
2031
2032 /* Optimize the access just a bit. */
2033 if (MEM_P (dest)
2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2037 && bytelen == GET_MODE_SIZE (mode))
2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2039 else
2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 mode, tmps[i]);
2042 }
2043
2044 /* Copy from the pseudo into the (probable) hard reg. */
2045 if (orig_dst != dst)
2046 emit_move_insn (orig_dst, dst);
2047 }
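
/* Usage sketch (hypothetical): the inverse of emit_group_load above; given
   the same two-register PARALLEL as SRC and a 16-byte BLKmode MEM as the
   destination, a caller might write

     emit_group_store (mem, parallel, type, 16);

   to spill the register pieces back into memory.  */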
2048
2049 /* Generate code to copy a BLKmode object of TYPE out of a
2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2051 is null, a stack temporary is created. TGTBLK is returned.
2052
2053 The purpose of this routine is to handle functions that return
2054 BLKmode structures in registers. Some machines (the PA for example)
2055 want to return all small structures in registers regardless of the
2056 structure's alignment. */
2057
2058 rtx
2059 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2060 {
2061 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062 rtx src = NULL, dst = NULL;
2063 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2065
2066 if (tgtblk == 0)
2067 {
2068 tgtblk = assign_temp (build_qualified_type (type,
2069 (TYPE_QUALS (type)
2070 | TYPE_QUAL_CONST)),
2071 0, 1, 1);
2072 preserve_temp_slots (tgtblk);
2073 }
2074
2075 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2076 into a new pseudo which is a full word. */
2077
2078 if (GET_MODE (srcreg) != BLKmode
2079 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2081
2082 /* If the structure doesn't take up a whole number of words, see whether
2083 SRCREG is padded on the left or on the right. If it's on the left,
2084 set PADDING_CORRECTION to the number of bits to skip.
2085
2086 In most ABIs, the structure will be returned at the least significant end of
2087 the register, which translates to right padding on little-endian
2088 targets and left padding on big-endian targets. The opposite
2089 holds if the structure is returned at the most significant
2090 end of the register. */
2091 if (bytes % UNITS_PER_WORD != 0
2092 && (targetm.calls.return_in_msb (type)
2093 ? !BYTES_BIG_ENDIAN
2094 : BYTES_BIG_ENDIAN))
2095 padding_correction
2096 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2097
2098 /* Copy the structure BITSIZE bits at a time.
2099
2100 We could probably emit more efficient code for machines which do not use
2101 strict alignment, but it doesn't seem worth the effort at the current
2102 time. */
2103 for (bitpos = 0, xbitpos = padding_correction;
2104 bitpos < bytes * BITS_PER_UNIT;
2105 bitpos += bitsize, xbitpos += bitsize)
2106 {
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == padding_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == padding_correction)
2112 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113 GET_MODE (srcreg));
2114
2115 /* We need a new destination operand each time bitpos is on
2116 a word boundary. */
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2119
2120 /* Use xbitpos for the source extraction (right justified) and
2121 bitpos for the destination store (left justified). */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
2125 NULL_RTX, word_mode, word_mode));
2126 }
2127
2128 return tgtblk;
2129 }
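
/* Usage sketch (hypothetical): given a BLKmode value returned in hard
   register VALREG and its tree TYPE, a caller can obtain a stack copy with

     rtx blk = copy_blkmode_from_reg (NULL_RTX, valreg, type);

   passing NULL_RTX for TGTBLK so that a stack temporary is created, as
   described above.  */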
2130
2131 /* Add a USE expression for REG to the (possibly empty) list pointed
2132 to by CALL_FUSAGE. REG must denote a hard register. */
2133
2134 void
2135 use_reg (rtx *call_fusage, rtx reg)
2136 {
2137 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138
2139 *call_fusage
2140 = gen_rtx_EXPR_LIST (VOIDmode,
2141 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2142 }
2143
2144 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145 starting at REGNO. All of these registers must be hard registers. */
2146
2147 void
2148 use_regs (rtx *call_fusage, int regno, int nregs)
2149 {
2150 int i;
2151
2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2153
2154 for (i = 0; i < nregs; i++)
2155 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2156 }
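
/* Usage sketch (hypothetical): to record that a call reads the two hard
   argument registers starting at regno 3, a caller might build

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 3, 2);

   and later attach CALL_FUSAGE to the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */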
2157
2158 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159 PARALLEL REGS. This is for calls that pass values in multiple
2160 non-contiguous locations. The Irix 6 ABI has examples of this. */
2161
2162 void
2163 use_group_regs (rtx *call_fusage, rtx regs)
2164 {
2165 int i;
2166
2167 for (i = 0; i < XVECLEN (regs, 0); i++)
2168 {
2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2170
2171 /* A NULL entry means the parameter goes both on the stack and in
2172 registers. This can also be a MEM for targets that pass values
2173 partially on the stack and partially in registers. */
2174 if (reg != 0 && REG_P (reg))
2175 use_reg (call_fusage, reg);
2176 }
2177 }
2178 \f
2179
2180 /* Determine whether the LEN bytes generated by CONSTFUN can be
2181 stored to memory using several move instructions. CONSTFUNDATA is
2182 a pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume. Return nonzero if a
2184 call to store_by_pieces should succeed. */
2185
2186 int
2187 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189 void *constfundata, unsigned int align)
2190 {
2191 unsigned HOST_WIDE_INT l;
2192 unsigned int max_size;
2193 HOST_WIDE_INT offset = 0;
2194 enum machine_mode mode, tmode;
2195 enum insn_code icode;
2196 int reverse;
2197 rtx cst;
2198
2199 if (len == 0)
2200 return 1;
2201
2202 if (! STORE_BY_PIECES_P (len, align))
2203 return 0;
2204
2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206 if (align >= GET_MODE_ALIGNMENT (tmode))
2207 align = GET_MODE_ALIGNMENT (tmode);
2208 else
2209 {
2210 enum machine_mode xmode;
2211
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2213 tmode != VOIDmode;
2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216 || SLOW_UNALIGNED_ACCESS (tmode, align))
2217 break;
2218
2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2220 }
2221
2222 /* We would first store what we can in the largest integer mode, then go to
2223 successively smaller modes. */
2224
2225 for (reverse = 0;
2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2227 reverse++)
2228 {
2229 l = len;
2230 mode = VOIDmode;
2231 max_size = STORE_MAX_PIECES + 1;
2232 while (max_size > 1)
2233 {
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) < max_size)
2237 mode = tmode;
2238
2239 if (mode == VOIDmode)
2240 break;
2241
2242 icode = mov_optab->handlers[(int) mode].insn_code;
2243 if (icode != CODE_FOR_nothing
2244 && align >= GET_MODE_ALIGNMENT (mode))
2245 {
2246 unsigned int size = GET_MODE_SIZE (mode);
2247
2248 while (l >= size)
2249 {
2250 if (reverse)
2251 offset -= size;
2252
2253 cst = (*constfun) (constfundata, offset, mode);
2254 if (!LEGITIMATE_CONSTANT_P (cst))
2255 return 0;
2256
2257 if (!reverse)
2258 offset += size;
2259
2260 l -= size;
2261 }
2262 }
2263
2264 max_size = GET_MODE_SIZE (mode);
2265 }
2266
2267 /* The code above should have handled everything. */
2268 gcc_assert (!l);
2269 }
2270
2271 return 1;
2272 }
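
/* Usage sketch (hypothetical): a caller normally checks can_store_by_pieces
   before committing, passing the same CONSTFUN and CONSTFUNDATA to both:

     if (can_store_by_pieces (len, constfun, data, align))
       store_by_pieces (dest, len, constfun, data, align, 0);
     else
       ... fall back to clear_storage or a library call ...  */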
2273
2274 /* Generate several move instructions to store LEN bytes generated by
2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2276 pointer which will be passed as argument in every CONSTFUN call.
2277 ALIGN is maximum alignment we can assume.
2278 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2279 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2280 stpcpy. */
2281
2282 rtx
2283 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285 void *constfundata, unsigned int align, int endp)
2286 {
2287 struct store_by_pieces data;
2288
2289 if (len == 0)
2290 {
2291 gcc_assert (endp != 2);
2292 return to;
2293 }
2294
2295 gcc_assert (STORE_BY_PIECES_P (len, align));
2296 data.constfun = constfun;
2297 data.constfundata = constfundata;
2298 data.len = len;
2299 data.to = to;
2300 store_by_pieces_1 (&data, align);
2301 if (endp)
2302 {
2303 rtx to1;
2304
2305 gcc_assert (!data.reverse);
2306 if (data.autinc_to)
2307 {
2308 if (endp == 2)
2309 {
2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2312 else
2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314 -1));
2315 }
2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2317 data.offset);
2318 }
2319 else
2320 {
2321 if (endp == 2)
2322 --data.offset;
2323 to1 = adjust_address (data.to, QImode, data.offset);
2324 }
2325 return to1;
2326 }
2327 else
2328 return data.to;
2329 }
2330
2331 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2333
2334 static void
2335 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2336 {
2337 struct store_by_pieces data;
2338
2339 if (len == 0)
2340 return;
2341
2342 data.constfun = clear_by_pieces_1;
2343 data.constfundata = NULL;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347 }
2348
2349 /* Callback routine for clear_by_pieces.
2350 Return const0_rtx unconditionally. */
2351
2352 static rtx
2353 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355 enum machine_mode mode ATTRIBUTE_UNUSED)
2356 {
2357 return const0_rtx;
2358 }
2359
2360 /* Subroutine of clear_by_pieces and store_by_pieces.
2361 Generate several move instructions to store LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2363
2364 static void
2365 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2366 unsigned int align ATTRIBUTE_UNUSED)
2367 {
2368 rtx to_addr = XEXP (data->to, 0);
2369 unsigned int max_size = STORE_MAX_PIECES + 1;
2370 enum machine_mode mode = VOIDmode, tmode;
2371 enum insn_code icode;
2372
2373 data->offset = 0;
2374 data->to_addr = to_addr;
2375 data->autinc_to
2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2378
2379 data->explicit_inc_to = 0;
2380 data->reverse
2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2382 if (data->reverse)
2383 data->offset = data->len;
2384
2385 /* If storing requires more than two move insns,
2386 copy addresses to registers (to make displacements shorter)
2387 and use post-increment if available. */
2388 if (!data->autinc_to
2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2390 {
2391 /* Determine the main mode we'll be using. */
2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394 if (GET_MODE_SIZE (tmode) < max_size)
2395 mode = tmode;
2396
2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2398 {
2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400 data->autinc_to = 1;
2401 data->explicit_inc_to = -1;
2402 }
2403
2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405 && ! data->autinc_to)
2406 {
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = 1;
2410 }
2411
2412 if ( !data->autinc_to && CONSTANT_P (to_addr))
2413 data->to_addr = copy_addr_to_reg (to_addr);
2414 }
2415
2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417 if (align >= GET_MODE_ALIGNMENT (tmode))
2418 align = GET_MODE_ALIGNMENT (tmode);
2419 else
2420 {
2421 enum machine_mode xmode;
2422
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2424 tmode != VOIDmode;
2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427 || SLOW_UNALIGNED_ACCESS (tmode, align))
2428 break;
2429
2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2431 }
2432
2433 /* First store what we can in the largest integer mode, then go to
2434 successively smaller modes. */
2435
2436 while (max_size > 1)
2437 {
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
2443 if (mode == VOIDmode)
2444 break;
2445
2446 icode = mov_optab->handlers[(int) mode].insn_code;
2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2448 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2449
2450 max_size = GET_MODE_SIZE (mode);
2451 }
2452
2453 /* The code above should have handled everything. */
2454 gcc_assert (!data->len);
2455 }
2456
2457 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2458 with move instructions for mode MODE. GENFUN is the gen_... function
2459 to make a move insn for that mode. DATA has all the other info. */
2460
2461 static void
2462 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463 struct store_by_pieces *data)
2464 {
2465 unsigned int size = GET_MODE_SIZE (mode);
2466 rtx to1, cst;
2467
2468 while (data->len >= size)
2469 {
2470 if (data->reverse)
2471 data->offset -= size;
2472
2473 if (data->autinc_to)
2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475 data->offset);
2476 else
2477 to1 = adjust_address (data->to, mode, data->offset);
2478
2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2480 emit_insn (gen_add2_insn (data->to_addr,
2481 GEN_INT (-(HOST_WIDE_INT) size)));
2482
2483 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484 emit_insn ((*genfun) (to1, cst));
2485
2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2488
2489 if (! data->reverse)
2490 data->offset += size;
2491
2492 data->len -= size;
2493 }
2494 }
2495 \f
2496 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2497 its length in bytes. */
2498
2499 rtx
2500 clear_storage (rtx object, rtx size, enum block_op_methods method)
2501 {
2502 enum machine_mode mode = GET_MODE (object);
2503 unsigned int align;
2504
2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2506
2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508 just move a zero. Otherwise, do this a piece at a time. */
2509 if (mode != BLKmode
2510 && GET_CODE (size) == CONST_INT
2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2512 {
2513 rtx zero = CONST0_RTX (mode);
2514 if (zero != NULL)
2515 {
2516 emit_move_insn (object, zero);
2517 return NULL;
2518 }
2519
2520 if (COMPLEX_MODE_P (mode))
2521 {
2522 zero = CONST0_RTX (GET_MODE_INNER (mode));
2523 if (zero != NULL)
2524 {
2525 write_complex_part (object, zero, 0);
2526 write_complex_part (object, zero, 1);
2527 return NULL;
2528 }
2529 }
2530 }
2531
2532 if (size == const0_rtx)
2533 return NULL;
2534
2535 align = MEM_ALIGN (object);
2536
2537 if (GET_CODE (size) == CONST_INT
2538 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539 clear_by_pieces (object, INTVAL (size), align);
2540 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2541 ;
2542 else
2543 return clear_storage_via_libcall (object, size,
2544 method == BLOCK_OP_TAILCALL);
2545
2546 return NULL;
2547 }
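
/* Usage sketch (hypothetical): zeroing a 32-byte BLKmode MEM could be
   written as

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which skips the constant-zero move check (the MEM is BLKmode) and then
   tries clear_by_pieces, a setmem pattern, and finally the memset
   libcall, as above.  */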
2548
2549 /* A subroutine of clear_storage. Expand a call to memset.
2550 Return the return value of memset, 0 otherwise. */
2551
2552 static rtx
2553 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2554 {
2555 tree call_expr, arg_list, fn, object_tree, size_tree;
2556 enum machine_mode size_mode;
2557 rtx retval;
2558
2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2560 place those new pseudos into a VAR_DECL and use them later. */
2561
2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2563
2564 size_mode = TYPE_MODE (sizetype);
2565 size = convert_to_mode (size_mode, size, 1);
2566 size = copy_to_mode_reg (size_mode, size);
2567
2568 /* It is incorrect to use the libcall calling conventions to call
2569 memset in this context. This could be a user call to memset and
2570 the user may wish to examine the return value from memset. For
2571 targets where libcalls and normal calls have different conventions
2572 for returning pointers, we could end up generating incorrect code. */
2573
2574 object_tree = make_tree (ptr_type_node, object);
2575 size_tree = make_tree (sizetype, size);
2576
2577 fn = clear_storage_libcall_fn (true);
2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2581
2582 /* Now we have to build up the CALL_EXPR itself. */
2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585 call_expr, arg_list, NULL_TREE);
2586 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2587
2588 retval = expand_normal (call_expr);
2589
2590 return retval;
2591 }
2592
2593 /* A subroutine of clear_storage_via_libcall. Create the tree node
2594 for the function we use for block clears. The first time FOR_CALL
2595 is true, we call assemble_external. */
2596
2597 static GTY(()) tree block_clear_fn;
2598
2599 void
2600 init_block_clear_fn (const char *asmspec)
2601 {
2602 if (!block_clear_fn)
2603 {
2604 tree fn, args;
2605
2606 fn = get_identifier ("memset");
2607 args = build_function_type_list (ptr_type_node, ptr_type_node,
2608 integer_type_node, sizetype,
2609 NULL_TREE);
2610
2611 fn = build_decl (FUNCTION_DECL, fn, args);
2612 DECL_EXTERNAL (fn) = 1;
2613 TREE_PUBLIC (fn) = 1;
2614 DECL_ARTIFICIAL (fn) = 1;
2615 TREE_NOTHROW (fn) = 1;
2616 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2618
2619 block_clear_fn = fn;
2620 }
2621
2622 if (asmspec)
2623 set_user_assembler_name (block_clear_fn, asmspec);
2624 }
2625
2626 static tree
2627 clear_storage_libcall_fn (int for_call)
2628 {
2629 static bool emitted_extern;
2630
2631 if (!block_clear_fn)
2632 init_block_clear_fn (NULL);
2633
2634 if (for_call && !emitted_extern)
2635 {
2636 emitted_extern = true;
2637 make_decl_rtl (block_clear_fn);
2638 assemble_external (block_clear_fn);
2639 }
2640
2641 return block_clear_fn;
2642 }
2643 \f
2644 /* Expand a setmem pattern; return true if successful. */
2645
2646 bool
2647 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648 {
2649 /* Try the most limited insn first, because there's no point
2650 including more than one in the machine description unless
2651 the more limited one has some advantage. */
2652
2653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654 enum machine_mode mode;
2655
2656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657 mode = GET_MODE_WIDER_MODE (mode))
2658 {
2659 enum insn_code code = setmem_optab[(int) mode];
2660 insn_operand_predicate_fn pred;
2661
2662 if (code != CODE_FOR_nothing
2663 /* We don't need MODE to be narrower than
2664 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665 the mode mask, as it is returned by the macro, it will
2666 definitely be less than the actual mode mask. */
2667 && ((GET_CODE (size) == CONST_INT
2668 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669 <= (GET_MODE_MASK (mode) >> 1)))
2670 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672 || (*pred) (object, BLKmode))
2673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674 || (*pred) (opalign, VOIDmode)))
2675 {
2676 rtx opsize, opchar;
2677 enum machine_mode char_mode;
2678 rtx last = get_last_insn ();
2679 rtx pat;
2680
2681 opsize = convert_to_mode (mode, size, 1);
2682 pred = insn_data[(int) code].operand[1].predicate;
2683 if (pred != 0 && ! (*pred) (opsize, mode))
2684 opsize = copy_to_mode_reg (mode, opsize);
2685
2686 opchar = val;
2687 char_mode = insn_data[(int) code].operand[2].mode;
2688 if (char_mode != VOIDmode)
2689 {
2690 opchar = convert_to_mode (char_mode, opchar, 1);
2691 pred = insn_data[(int) code].operand[2].predicate;
2692 if (pred != 0 && ! (*pred) (opchar, char_mode))
2693 opchar = copy_to_mode_reg (char_mode, opchar);
2694 }
2695
2696 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697 if (pat)
2698 {
2699 emit_insn (pat);
2700 return true;
2701 }
2702 else
2703 delete_insns_since (last);
2704 }
2705 }
2706
2707 return false;
2708 }
2709
2710 \f
2711 /* Write to one of the components of the complex value CPLX. Write VAL to
2712 the real part if IMAG_P is false, and the imaginary part if it's true. */
2713
2714 static void
2715 write_complex_part (rtx cplx, rtx val, bool imag_p)
2716 {
2717 enum machine_mode cmode;
2718 enum machine_mode imode;
2719 unsigned ibitsize;
2720
2721 if (GET_CODE (cplx) == CONCAT)
2722 {
2723 emit_move_insn (XEXP (cplx, imag_p), val);
2724 return;
2725 }
2726
2727 cmode = GET_MODE (cplx);
2728 imode = GET_MODE_INNER (cmode);
2729 ibitsize = GET_MODE_BITSIZE (imode);
2730
2731 /* For MEMs simplify_gen_subreg may generate an invalid new address
2732 because, e.g., the original address is considered mode-dependent
2733 by the target, which restricts simplify_subreg from invoking
2734 adjust_address_nv. Instead of preparing fallback support for an
2735 invalid address, we call adjust_address_nv directly. */
2736 if (MEM_P (cplx))
2737 {
2738 emit_move_insn (adjust_address_nv (cplx, imode,
2739 imag_p ? GET_MODE_SIZE (imode) : 0),
2740 val);
2741 return;
2742 }
2743
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since store_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2753 || (REG_P (cplx)
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2756 {
2757 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2759 if (part)
2760 {
2761 emit_move_insn (part, val);
2762 return;
2763 }
2764 else
2765 /* simplify_gen_subreg may fail for sub-word MEMs. */
2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2767 }
2768
2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2770 }
2771
2772 /* Extract one of the components of the complex value CPLX. Extract the
2773 real part if IMAG_P is false, and the imaginary part if it's true. */
2774
2775 static rtx
2776 read_complex_part (rtx cplx, bool imag_p)
2777 {
2778 enum machine_mode cmode, imode;
2779 unsigned ibitsize;
2780
2781 if (GET_CODE (cplx) == CONCAT)
2782 return XEXP (cplx, imag_p);
2783
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2787
2788 /* Special case reads from complex constants that got spilled to memory. */
2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2790 {
2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2793 {
2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795 if (CONSTANT_CLASS_P (part))
2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2797 }
2798 }
2799
2800 /* For MEMs simplify_gen_subreg may generate an invalid new address
2801 because, e.g., the original address is considered mode-dependent
2802 by the target, which restricts simplify_subreg from invoking
2803 adjust_address_nv. Instead of preparing fallback support for an
2804 invalid address, we call adjust_address_nv directly. */
2805 if (MEM_P (cplx))
2806 return adjust_address_nv (cplx, imode,
2807 imag_p ? GET_MODE_SIZE (imode) : 0);
2808
2809 /* If the sub-object is at least word sized, then we know that subregging
2810 will work. This special case is important, since extract_bit_field
2811 wants to operate on integer modes, and there's rarely an OImode to
2812 correspond to TCmode. */
2813 if (ibitsize >= BITS_PER_WORD
2814 /* For hard regs we have exact predicates. Assume we can split
2815 the original object if it spans an even number of hard regs.
2816 This special case is important for SCmode on 64-bit platforms
2817 where the natural size of floating-point regs is 32-bit. */
2818 || (REG_P (cplx)
2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821 {
2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823 imag_p ? GET_MODE_SIZE (imode) : 0);
2824 if (ret)
2825 return ret;
2826 else
2827 /* simplify_gen_subreg may fail for sub-word MEMs. */
2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2829 }
2830
2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832 true, NULL_RTX, imode, imode);
2833 }
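
/* Worked example (hypothetical): for an SCmode value, IMODE is SFmode and
   GET_MODE_SIZE (imode) is typically 4, so for a MEM the two halves are
   accessed roughly as

     read_complex_part (cplx, false)   -> (mem:SF addr)
     read_complex_part (cplx, true)    -> (mem:SF (plus addr 4))

   matching the offsets used by write_complex_part above.  */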
2834 \f
2835 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2837 represented in NEW_MODE. If FORCE is true, this will never happen, as
2838 we'll force-create a SUBREG if needed. */
2839
2840 static rtx
2841 emit_move_change_mode (enum machine_mode new_mode,
2842 enum machine_mode old_mode, rtx x, bool force)
2843 {
2844 rtx ret;
2845
2846 if (MEM_P (x))
2847 {
2848 /* We don't have to worry about changing the address since the
2849 size in bytes is supposed to be the same. */
2850 if (reload_in_progress)
2851 {
2852 /* Copy the MEM to change the mode and move any
2853 substitutions from the old MEM to the new one. */
2854 ret = adjust_address_nv (x, new_mode, 0);
2855 copy_replacements (x, ret);
2856 }
2857 else
2858 ret = adjust_address (x, new_mode, 0);
2859 }
2860 else
2861 {
2862 /* Note that we do want simplify_subreg's behavior of validating
2863 that the new mode is ok for a hard register. If we were to use
2864 simplify_gen_subreg, we would create the subreg, but would
2865 probably run into the target not being able to implement it. */
2866 /* Except, of course, when FORCE is true, when this is exactly what
2867 we want. Which is needed for CCmodes on some targets. */
2868 if (force)
2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2870 else
2871 ret = simplify_subreg (new_mode, x, old_mode, 0);
2872 }
2873
2874 return ret;
2875 }
2876
2877 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2878 an integer mode of the same size as MODE. Returns the instruction
2879 emitted, or NULL if such a move could not be generated. */
2880
2881 static rtx
2882 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2883 {
2884 enum machine_mode imode;
2885 enum insn_code code;
2886
2887 /* There must exist a mode of the exact size we require. */
2888 imode = int_mode_for_mode (mode);
2889 if (imode == BLKmode)
2890 return NULL_RTX;
2891
2892 /* The target must support moves in this mode. */
2893 code = mov_optab->handlers[imode].insn_code;
2894 if (code == CODE_FOR_nothing)
2895 return NULL_RTX;
2896
2897 x = emit_move_change_mode (imode, mode, x, force);
2898 if (x == NULL_RTX)
2899 return NULL_RTX;
2900 y = emit_move_change_mode (imode, mode, y, force);
2901 if (y == NULL_RTX)
2902 return NULL_RTX;
2903 return emit_insn (GEN_FCN (code) (x, y));
2904 }
2905
2906 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2907 Return an equivalent MEM that does not use an auto-increment. */
2908
2909 static rtx
2910 emit_move_resolve_push (enum machine_mode mode, rtx x)
2911 {
2912 enum rtx_code code = GET_CODE (XEXP (x, 0));
2913 HOST_WIDE_INT adjust;
2914 rtx temp;
2915
2916 adjust = GET_MODE_SIZE (mode);
2917 #ifdef PUSH_ROUNDING
2918 adjust = PUSH_ROUNDING (adjust);
2919 #endif
2920 if (code == PRE_DEC || code == POST_DEC)
2921 adjust = -adjust;
2922 else if (code == PRE_MODIFY || code == POST_MODIFY)
2923 {
2924 rtx expr = XEXP (XEXP (x, 0), 1);
2925 HOST_WIDE_INT val;
2926
2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929 val = INTVAL (XEXP (expr, 1));
2930 if (GET_CODE (expr) == MINUS)
2931 val = -val;
2932 gcc_assert (adjust == val || adjust == -val);
2933 adjust = val;
2934 }
2935
2936 /* Do not use anti_adjust_stack, since we don't want to update
2937 stack_pointer_delta. */
2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939 GEN_INT (adjust), stack_pointer_rtx,
2940 0, OPTAB_LIB_WIDEN);
2941 if (temp != stack_pointer_rtx)
2942 emit_move_insn (stack_pointer_rtx, temp);
2943
2944 switch (code)
2945 {
2946 case PRE_INC:
2947 case PRE_DEC:
2948 case PRE_MODIFY:
2949 temp = stack_pointer_rtx;
2950 break;
2951 case POST_INC:
2952 case POST_DEC:
2953 case POST_MODIFY:
2954 temp = plus_constant (stack_pointer_rtx, -adjust);
2955 break;
2956 default:
2957 gcc_unreachable ();
2958 }
2959
2960 return replace_equiv_address (x, temp);
2961 }
2962
2963 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2964 X is known to satisfy push_operand, and MODE is known to be complex.
2965 Returns the last instruction emitted. */
2966
2967 static rtx
2968 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2969 {
2970 enum machine_mode submode = GET_MODE_INNER (mode);
2971 bool imag_first;
2972
2973 #ifdef PUSH_ROUNDING
2974 unsigned int submodesize = GET_MODE_SIZE (submode);
2975
2976 /* In case we output to the stack, but the size is smaller than the
2977 machine can push exactly, we need to use move instructions. */
2978 if (PUSH_ROUNDING (submodesize) != submodesize)
2979 {
2980 x = emit_move_resolve_push (mode, x);
2981 return emit_move_insn (x, y);
2982 }
2983 #endif
2984
2985 /* Note that the real part always precedes the imag part in memory
2986 regardless of the machine's endianness. */
2987 switch (GET_CODE (XEXP (x, 0)))
2988 {
2989 case PRE_DEC:
2990 case POST_DEC:
2991 imag_first = true;
2992 break;
2993 case PRE_INC:
2994 case POST_INC:
2995 imag_first = false;
2996 break;
2997 default:
2998 gcc_unreachable ();
2999 }
3000
3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002 read_complex_part (y, imag_first));
3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, !imag_first));
3005 }
3006
3007 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3008 MODE is known to be complex. Returns the last instruction emitted. */
3009
3010 static rtx
3011 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3012 {
3013 bool try_int;
3014
3015 /* Need to take special care for pushes, to maintain proper ordering
3016 of the data, and possibly extra padding. */
3017 if (push_operand (x, mode))
3018 return emit_move_complex_push (mode, x, y);
3019
3020 /* See if we can coerce the target into moving both values at once. */
3021
3022 /* Move floating point as parts. */
3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3025 try_int = false;
3026 /* Not possible if the values are inherently not adjacent. */
3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3028 try_int = false;
3029 /* Is possible if both are registers (or subregs of registers). */
3030 else if (register_operand (x, mode) && register_operand (y, mode))
3031 try_int = true;
3032 /* If one of the operands is a memory, and alignment constraints
3033 are friendly enough, we may be able to do combined memory operations.
3034 We do not attempt this if Y is a constant because that combination is
3035 usually better with the by-parts thing below. */
3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037 && (!STRICT_ALIGNMENT
3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3039 try_int = true;
3040 else
3041 try_int = false;
3042
3043 if (try_int)
3044 {
3045 rtx ret;
3046
3047 /* For memory to memory moves, optimal behavior can be had with the
3048 existing block move logic. */
3049 if (MEM_P (x) && MEM_P (y))
3050 {
3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052 BLOCK_OP_NO_LIBCALL);
3053 return get_last_insn ();
3054 }
3055
3056 ret = emit_move_via_integer (mode, x, y, true);
3057 if (ret)
3058 return ret;
3059 }
3060
3061 /* Show the output dies here. This is necessary for SUBREGs
3062 of pseudos since we cannot track their lifetimes correctly;
3063 hard regs shouldn't appear here except as return values. */
3064 if (!reload_completed && !reload_in_progress
3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3067
3068 write_complex_part (x, read_complex_part (y, false), false);
3069 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3071 }
3072
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3075
3076 static rtx
3077 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3078 {
3079 rtx ret;
3080
3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3082 if (mode != CCmode)
3083 {
3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085 if (code != CODE_FOR_nothing)
3086 {
3087 x = emit_move_change_mode (CCmode, mode, x, true);
3088 y = emit_move_change_mode (CCmode, mode, y, true);
3089 return emit_insn (GEN_FCN (code) (x, y));
3090 }
3091 }
3092
3093 /* Otherwise, find the MODE_INT mode of the same width. */
3094 ret = emit_move_via_integer (mode, x, y, false);
3095 gcc_assert (ret != NULL);
3096 return ret;
3097 }
3098
3099 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3100 MODE is any multi-word or full-word mode that lacks a move_insn
3101 pattern. Note that you will get better code if you define such
3102 patterns, even if they must turn into multiple assembler instructions. */
3103
3104 static rtx
3105 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3106 {
3107 rtx last_insn = 0;
3108 rtx seq, inner;
3109 bool need_clobber;
3110 int i;
3111
3112 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3113
3114 /* If X is a push on the stack, do the push now and replace
3115 X with a reference to the stack pointer. */
3116 if (push_operand (x, mode))
3117 x = emit_move_resolve_push (mode, x);
3118
3119 /* If we are in reload, see if either operand is a MEM whose address
3120 is scheduled for replacement. */
3121 if (reload_in_progress && MEM_P (x)
3122 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3123 x = replace_equiv_address_nv (x, inner);
3124 if (reload_in_progress && MEM_P (y)
3125 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3126 y = replace_equiv_address_nv (y, inner);
3127
3128 start_sequence ();
3129
3130 need_clobber = false;
3131 for (i = 0;
3132 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3133 i++)
3134 {
3135 rtx xpart = operand_subword (x, i, 1, mode);
3136 rtx ypart = operand_subword (y, i, 1, mode);
3137
3138 /* If we can't get a part of Y, put Y into memory if it is a
3139 constant. Otherwise, force it into a register. Then we must
3140 be able to get a part of Y. */
3141 if (ypart == 0 && CONSTANT_P (y))
3142 {
3143 y = use_anchored_address (force_const_mem (mode, y));
3144 ypart = operand_subword (y, i, 1, mode);
3145 }
3146 else if (ypart == 0)
3147 ypart = operand_subword_force (y, i, mode);
3148
3149 gcc_assert (xpart && ypart);
3150
3151 need_clobber |= (GET_CODE (xpart) == SUBREG);
3152
3153 last_insn = emit_move_insn (xpart, ypart);
3154 }
3155
3156 seq = get_insns ();
3157 end_sequence ();
3158
3159 /* Show the output dies here. This is necessary for SUBREGs
3160 of pseudos since we cannot track their lifetimes correctly;
3161 hard regs shouldn't appear here except as return values.
3162 We never want to emit such a clobber after reload. */
3163 if (x != y
3164 && ! (reload_in_progress || reload_completed)
3165 && need_clobber != 0)
3166 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3167
3168 emit_insn (seq);
3169
3170 return last_insn;
3171 }
3172
3173 /* Low level part of emit_move_insn.
3174 Called just like emit_move_insn, but assumes X and Y
3175 are basically valid. */
3176
3177 rtx
3178 emit_move_insn_1 (rtx x, rtx y)
3179 {
3180 enum machine_mode mode = GET_MODE (x);
3181 enum insn_code code;
3182
3183 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3184
3185 code = mov_optab->handlers[mode].insn_code;
3186 if (code != CODE_FOR_nothing)
3187 return emit_insn (GEN_FCN (code) (x, y));
3188
3189 /* Expand complex moves by moving real part and imag part. */
3190 if (COMPLEX_MODE_P (mode))
3191 return emit_move_complex (mode, x, y);
3192
3193 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3194 {
3195 rtx result = emit_move_via_integer (mode, x, y, true);
3196
3197 /* If we can't find an integer mode, use multi words. */
3198 if (result)
3199 return result;
3200 else
3201 return emit_move_multi_word (mode, x, y);
3202 }
3203
3204 if (GET_MODE_CLASS (mode) == MODE_CC)
3205 return emit_move_ccmode (mode, x, y);
3206
3207 /* Try using a move pattern for the corresponding integer mode. This is
3208 only safe when simplify_subreg can convert MODE constants into integer
3209 constants. At present, it can only do this reliably if the value
3210 fits within a HOST_WIDE_INT. */
3211 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3212 {
3213 rtx ret = emit_move_via_integer (mode, x, y, false);
3214 if (ret)
3215 return ret;
3216 }
3217
3218 return emit_move_multi_word (mode, x, y);
3219 }
3220
3221 /* Generate code to copy Y into X.
3222 Both Y and X must have the same mode, except that
3223 Y can be a constant with VOIDmode.
3224 This mode cannot be BLKmode; use emit_block_move for that.
3225
3226 Return the last instruction emitted. */
3227
3228 rtx
3229 emit_move_insn (rtx x, rtx y)
3230 {
3231 enum machine_mode mode = GET_MODE (x);
3232 rtx y_cst = NULL_RTX;
3233 rtx last_insn, set;
3234
3235 gcc_assert (mode != BLKmode
3236 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3237
3238 if (CONSTANT_P (y))
3239 {
3240 if (optimize
3241 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3242 && (last_insn = compress_float_constant (x, y)))
3243 return last_insn;
3244
3245 y_cst = y;
3246
3247 if (!LEGITIMATE_CONSTANT_P (y))
3248 {
3249 y = force_const_mem (mode, y);
3250
3251 /* If the target's cannot_force_const_mem prevented the spill,
3252 assume that the target's move expanders will also take care
3253 of the non-legitimate constant. */
3254 if (!y)
3255 y = y_cst;
3256 else
3257 y = use_anchored_address (y);
3258 }
3259 }
3260
3261 /* If X or Y are memory references, verify that their addresses are valid
3262 for the machine. */
3263 if (MEM_P (x)
3264 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3265 && ! push_operand (x, GET_MODE (x)))
3266 || (flag_force_addr
3267 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3268 x = validize_mem (x);
3269
3270 if (MEM_P (y)
3271 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3272 || (flag_force_addr
3273 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3274 y = validize_mem (y);
3275
3276 gcc_assert (mode != BLKmode);
3277
3278 last_insn = emit_move_insn_1 (x, y);
3279
3280 if (y_cst && REG_P (x)
3281 && (set = single_set (last_insn)) != NULL_RTX
3282 && SET_DEST (set) == x
3283 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3284 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3285
3286 return last_insn;
3287 }
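
/* Usage sketch (hypothetical): copying a small constant into a fresh
   SImode pseudo is simply

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   with emit_move_insn_1 above handling any mode-specific lowering.  */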
3288
3289 /* If Y is representable exactly in a narrower mode, and the target can
3290 perform the extension directly from constant or memory, then emit the
3291 move as an extension. */
3292
3293 static rtx
3294 compress_float_constant (rtx x, rtx y)
3295 {
3296 enum machine_mode dstmode = GET_MODE (x);
3297 enum machine_mode orig_srcmode = GET_MODE (y);
3298 enum machine_mode srcmode;
3299 REAL_VALUE_TYPE r;
3300 int oldcost, newcost;
3301
3302 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3303
3304 if (LEGITIMATE_CONSTANT_P (y))
3305 oldcost = rtx_cost (y, SET);
3306 else
3307 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3308
3309 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3310 srcmode != orig_srcmode;
3311 srcmode = GET_MODE_WIDER_MODE (srcmode))
3312 {
3313 enum insn_code ic;
3314 rtx trunc_y, last_insn;
3315
3316 /* Skip if the target can't extend this way. */
3317 ic = can_extend_p (dstmode, srcmode, 0);
3318 if (ic == CODE_FOR_nothing)
3319 continue;
3320
3321 /* Skip if the narrowed value isn't exact. */
3322 if (! exact_real_truncate (srcmode, &r))
3323 continue;
3324
3325 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3326
3327 if (LEGITIMATE_CONSTANT_P (trunc_y))
3328 {
3329 /* Skip if the target needs extra instructions to perform
3330 the extension. */
3331 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3332 continue;
3333 /* This is valid, but may not be cheaper than the original. */
3334 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3335 if (oldcost < newcost)
3336 continue;
3337 }
3338 else if (float_extend_from_mem[dstmode][srcmode])
3339 {
3340 trunc_y = force_const_mem (srcmode, trunc_y);
3341 /* This is valid, but may not be cheaper than the original. */
3342 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3343 if (oldcost < newcost)
3344 continue;
3345 trunc_y = validize_mem (trunc_y);
3346 }
3347 else
3348 continue;
3349
3350 /* For CSE's benefit, force the compressed constant pool entry
3351 into a new pseudo. This constant may be used in different modes,
3352 and if not, combine will put things back together for us. */
3353 trunc_y = force_reg (srcmode, trunc_y);
3354 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3355 last_insn = get_last_insn ();
3356
3357 if (REG_P (x))
3358 set_unique_reg_note (last_insn, REG_EQUAL, y);
3359
3360 return last_insn;
3361 }
3362
3363 return NULL_RTX;
3364 }
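
/* Illustrative sketch (hypothetical target): if X is a DFmode register and
   Y is the DFmode constant 1.0, the value is exactly representable in
   SFmode, so on a target that extends directly from SFmode the emitted
   sequence is roughly

     (set (reg:SF tmp) (const_double:SF 1.0))
     (set (reg:DF x)   (float_extend:DF (reg:SF tmp)))

   provided the narrower form is no more costly than the original, per the
   checks above.  */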
3365 \f
3366 /* Pushing data onto the stack. */
3367
3368 /* Push a block of length SIZE (perhaps variable)
3369 and return an rtx to address the beginning of the block.
3370 The value may be virtual_outgoing_args_rtx.
3371
3372 EXTRA is the number of bytes of padding to push in addition to SIZE.
3373 BELOW nonzero means this padding comes at low addresses;
3374 otherwise, the padding comes at high addresses. */
3375
3376 rtx
3377 push_block (rtx size, int extra, int below)
3378 {
3379 rtx temp;
3380
3381 size = convert_modes (Pmode, ptr_mode, size, 1);
3382 if (CONSTANT_P (size))
3383 anti_adjust_stack (plus_constant (size, extra));
3384 else if (REG_P (size) && extra == 0)
3385 anti_adjust_stack (size);
3386 else
3387 {
3388 temp = copy_to_mode_reg (Pmode, size);
3389 if (extra != 0)
3390 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3391 temp, 0, OPTAB_LIB_WIDEN);
3392 anti_adjust_stack (temp);
3393 }
3394
3395 #ifndef STACK_GROWS_DOWNWARD
3396 if (0)
3397 #else
3398 if (1)
3399 #endif
3400 {
3401 temp = virtual_outgoing_args_rtx;
3402 if (extra != 0 && below)
3403 temp = plus_constant (temp, extra);
3404 }
3405 else
3406 {
3407 if (GET_CODE (size) == CONST_INT)
3408 temp = plus_constant (virtual_outgoing_args_rtx,
3409 -INTVAL (size) - (below ? 0 : extra));
3410 else if (extra != 0 && !below)
3411 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3412 negate_rtx (Pmode, plus_constant (size, extra)));
3413 else
3414 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3415 negate_rtx (Pmode, size));
3416 }
3417
3418 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3419 }
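
/* Usage sketch (hypothetical): reserving 16 bytes of argument space and
   obtaining the address of its start might be

     rtx addr = push_block (GEN_INT (16), 0, 0);

   where, on targets whose stack grows downward, the result may simply be
   virtual_outgoing_args_rtx after the stack adjustment.  */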
3420
3421 #ifdef PUSH_ROUNDING
3422
3423 /* Emit single push insn. */
3424
3425 static void
3426 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3427 {
3428 rtx dest_addr;
3429 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3430 rtx dest;
3431 enum insn_code icode;
3432 insn_operand_predicate_fn pred;
3433
3434 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3435 /* If there is a push pattern, use it. Otherwise fall back to the old
3436 way of handing a MEM representing the push operation to the move expander. */
3437 icode = push_optab->handlers[(int) mode].insn_code;
3438 if (icode != CODE_FOR_nothing)
3439 {
3440 if (((pred = insn_data[(int) icode].operand[0].predicate)
3441 && !((*pred) (x, mode))))
3442 x = force_reg (mode, x);
3443 emit_insn (GEN_FCN (icode) (x));
3444 return;
3445 }
3446 if (GET_MODE_SIZE (mode) == rounded_size)
3447 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3448 /* If we are to pad downward, adjust the stack pointer first and
3449 then store X into the stack location using an offset. This is
3450 because emit_move_insn does not know how to pad; it does not have
3451 access to type. */
3452 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3453 {
3454 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3455 HOST_WIDE_INT offset;
3456
3457 emit_move_insn (stack_pointer_rtx,
3458 expand_binop (Pmode,
3459 #ifdef STACK_GROWS_DOWNWARD
3460 sub_optab,
3461 #else
3462 add_optab,
3463 #endif
3464 stack_pointer_rtx,
3465 GEN_INT (rounded_size),
3466 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3467
3468 offset = (HOST_WIDE_INT) padding_size;
3469 #ifdef STACK_GROWS_DOWNWARD
3470 if (STACK_PUSH_CODE == POST_DEC)
3471 /* We have already decremented the stack pointer, so get the
3472 previous value. */
3473 offset += (HOST_WIDE_INT) rounded_size;
3474 #else
3475 if (STACK_PUSH_CODE == POST_INC)
3476 /* We have already incremented the stack pointer, so get the
3477 previous value. */
3478 offset -= (HOST_WIDE_INT) rounded_size;
3479 #endif
3480 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3481 }
3482 else
3483 {
3484 #ifdef STACK_GROWS_DOWNWARD
3485 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3486 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3487 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3488 #else
3489 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3490 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3491 GEN_INT (rounded_size));
3492 #endif
3493 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3494 }
3495
3496 dest = gen_rtx_MEM (mode, dest_addr);
3497
3498 if (type != 0)
3499 {
3500 set_mem_attributes (dest, type, 1);
3501
3502 if (flag_optimize_sibling_calls)
3503 /* Function incoming arguments may overlap with sibling call
3504 outgoing arguments and we cannot allow reordering of reads
3505 from function arguments with stores to outgoing arguments
3506 of sibling calls. */
3507 set_mem_alias_set (dest, 0);
3508 }
3509 emit_move_insn (dest, x);
3510 }
3511 #endif
3512
3513 /* Generate code to push X onto the stack, assuming it has mode MODE and
3514 type TYPE.
3515 MODE is redundant except when X is a CONST_INT (since they don't
3516 carry mode info).
3517 SIZE is an rtx for the size of data to be copied (in bytes),
3518 needed only if X is BLKmode.
3519
3520 ALIGN (in bits) is maximum alignment we can assume.
3521
3522 If PARTIAL and REG are both nonzero, then copy that many of the first
3523 bytes of X into registers starting with REG, and push the rest of X.
3524 The amount of space pushed is decreased by PARTIAL bytes.
3525 REG must be a hard register in this case.
3526 If REG is zero but PARTIAL is not, take all other actions for an
3527 argument partially in registers, but do not actually load any
3528 registers.
3529
3530 EXTRA is the amount in bytes of extra space to leave next to this arg.
3531 This is ignored if an argument block has already been allocated.
3532
3533 On a machine that lacks real push insns, ARGS_ADDR is the address of
3534 the bottom of the argument block for this call. We use indexing off there
3535 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3536 argument block has not been preallocated.
3537
3538 ARGS_SO_FAR is the size of args previously pushed for this call.
3539
3540 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3541 for arguments passed in registers. If nonzero, it will be the number
3542 of bytes required. */
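/* Illustration (hypothetical 32-bit target with UNITS_PER_WORD == 4):
   a call with PARTIAL == 8 and a hard register REG places the first
   two words of X in REG and REG+1 (see the move_block_to_reg call at
   the end of this function) and pushes only the remaining bytes, so
   the stack space used is 8 bytes smaller than the full size of X.  */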
3543
3544 void
3545 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3546 unsigned int align, int partial, rtx reg, int extra,
3547 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3548 rtx alignment_pad)
3549 {
3550 rtx xinner;
3551 enum direction stack_direction
3552 #ifdef STACK_GROWS_DOWNWARD
3553 = downward;
3554 #else
3555 = upward;
3556 #endif
3557
3558 /* Decide where to pad the argument: `downward' for below,
3559 `upward' for above, or `none' for don't pad it.
3560 Default is below for small data on big-endian machines; else above. */
3561 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3562
3563 /* Invert direction if stack is post-decrement.
3564 FIXME: why? */
3565 if (STACK_PUSH_CODE == POST_DEC)
3566 if (where_pad != none)
3567 where_pad = (where_pad == downward ? upward : downward);
3568
3569 xinner = x;
3570
3571 if (mode == BLKmode)
3572 {
3573 /* Copy a block into the stack, entirely or partially. */
3574
3575 rtx temp;
3576 int used;
3577 int offset;
3578 int skip;
3579
3580 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3581 used = partial - offset;
3582
3583 gcc_assert (size);
3584
3585 /* USED is now the # of bytes we need not copy to the stack
3586 because registers will take care of them. */
3587
3588 if (partial != 0)
3589 xinner = adjust_address (xinner, BLKmode, used);
3590
3591 /* If the partial register-part of the arg counts in its stack size,
3592 skip the part of stack space corresponding to the registers.
3593 Otherwise, start copying to the beginning of the stack space,
3594 by setting SKIP to 0. */
3595 skip = (reg_parm_stack_space == 0) ? 0 : used;
3596
3597 #ifdef PUSH_ROUNDING
3598 /* Do it with several push insns if that doesn't take lots of insns
3599 and if there is no difficulty with push insns that skip bytes
3600 on the stack for alignment purposes. */
3601 if (args_addr == 0
3602 && PUSH_ARGS
3603 && GET_CODE (size) == CONST_INT
3604 && skip == 0
3605 && MEM_ALIGN (xinner) >= align
3606 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3607 /* Here we avoid the case of a structure whose weak alignment
3608 would force many pushes of small amounts of data,
3609 since such small pushes do rounding that causes trouble. */
3610 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3611 || align >= BIGGEST_ALIGNMENT
3612 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3613 == (align / BITS_PER_UNIT)))
3614 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3615 {
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3622
3623 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3624 }
3625 else
3626 #endif /* PUSH_ROUNDING */
3627 {
3628 rtx target;
3629
3630 /* Otherwise make space on the stack and copy the data
3631 to the address of that space. */
3632
3633 /* Deduct words put into registers from the size we must copy. */
3634 if (partial != 0)
3635 {
3636 if (GET_CODE (size) == CONST_INT)
3637 size = GEN_INT (INTVAL (size) - used);
3638 else
3639 size = expand_binop (GET_MODE (size), sub_optab, size,
3640 GEN_INT (used), NULL_RTX, 0,
3641 OPTAB_LIB_WIDEN);
3642 }
3643
3644 /* Get the address of the stack space.
3645 In this case, we do not deal with EXTRA separately.
3646 A single stack adjust will do. */
3647 if (! args_addr)
3648 {
3649 temp = push_block (size, extra, where_pad == downward);
3650 extra = 0;
3651 }
3652 else if (GET_CODE (args_so_far) == CONST_INT)
3653 temp = memory_address (BLKmode,
3654 plus_constant (args_addr,
3655 skip + INTVAL (args_so_far)));
3656 else
3657 temp = memory_address (BLKmode,
3658 plus_constant (gen_rtx_PLUS (Pmode,
3659 args_addr,
3660 args_so_far),
3661 skip));
3662
3663 if (!ACCUMULATE_OUTGOING_ARGS)
3664 {
3665 /* If the source is referenced relative to the stack pointer,
3666 copy it to another register to stabilize it. We do not need
3667 to do this if we know that we won't be changing sp. */
3668
3669 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3670 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3671 temp = copy_to_reg (temp);
3672 }
3673
3674 target = gen_rtx_MEM (BLKmode, temp);
3675
3676 /* We do *not* set_mem_attributes here, because incoming arguments
3677 may overlap with sibling call outgoing arguments and we cannot
3678 allow reordering of reads from function arguments with stores
3679 to outgoing arguments of sibling calls. We do, however, want
3680 to record the alignment of the stack slot. */
3681 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3682 PARM_BOUNDARY. Assume the caller isn't lying. */
3683 set_mem_align (target, align);
3684
3685 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3686 }
3687 }
3688 else if (partial > 0)
3689 {
3690 /* Scalar partly in registers. */
3691
3692 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3693 int i;
3694 int not_stack;
3695 /* # bytes of start of argument
3696 that we must make space for but need not store. */
3697 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3698 int args_offset = INTVAL (args_so_far);
3699 int skip;
3700
3701 /* Push padding now if padding above and stack grows down,
3702 or if padding below and stack grows up.
3703 But if space already allocated, this has already been done. */
3704 if (extra && args_addr == 0
3705 && where_pad != none && where_pad != stack_direction)
3706 anti_adjust_stack (GEN_INT (extra));
3707
3708 /* If we make space by pushing it, we might as well push
3709 the real data. Otherwise, we can leave OFFSET nonzero
3710 and leave the space uninitialized. */
3711 if (args_addr == 0)
3712 offset = 0;
3713
3714 /* Now NOT_STACK gets the number of words that we don't need to
3715 allocate on the stack. Convert OFFSET to words too. */
3716 not_stack = (partial - offset) / UNITS_PER_WORD;
3717 offset /= UNITS_PER_WORD;
3718
3719 /* If the partial register-part of the arg counts in its stack size,
3720 skip the part of stack space corresponding to the registers.
3721 Otherwise, start copying to the beginning of the stack space,
3722 by setting SKIP to 0. */
3723 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3724
3725 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3726 x = validize_mem (force_const_mem (mode, x));
3727
3728 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3729 SUBREGs of such registers are not allowed. */
3730 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3731 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3732 x = copy_to_reg (x);
3733
3734 /* Loop over all the words allocated on the stack for this arg. */
3735 /* We can do it by words, because any scalar bigger than a word
3736 has a size a multiple of a word. */
3737 #ifndef PUSH_ARGS_REVERSED
3738 for (i = not_stack; i < size; i++)
3739 #else
3740 for (i = size - 1; i >= not_stack; i--)
3741 #endif
3742 if (i >= not_stack + offset)
3743 emit_push_insn (operand_subword_force (x, i, mode),
3744 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3745 0, args_addr,
3746 GEN_INT (args_offset + ((i - not_stack + skip)
3747 * UNITS_PER_WORD)),
3748 reg_parm_stack_space, alignment_pad);
3749 }
3750 else
3751 {
3752 rtx addr;
3753 rtx dest;
3754
3755 /* Push padding now if padding above and stack grows down,
3756 or if padding below and stack grows up.
3757 But if space already allocated, this has already been done. */
3758 if (extra && args_addr == 0
3759 && where_pad != none && where_pad != stack_direction)
3760 anti_adjust_stack (GEN_INT (extra));
3761
3762 #ifdef PUSH_ROUNDING
3763 if (args_addr == 0 && PUSH_ARGS)
3764 emit_single_push_insn (mode, x, type);
3765 else
3766 #endif
3767 {
3768 if (GET_CODE (args_so_far) == CONST_INT)
3769 addr
3770 = memory_address (mode,
3771 plus_constant (args_addr,
3772 INTVAL (args_so_far)));
3773 else
3774 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3775 args_so_far));
3776 dest = gen_rtx_MEM (mode, addr);
3777
3778 /* We do *not* set_mem_attributes here, because incoming arguments
3779 may overlap with sibling call outgoing arguments and we cannot
3780 allow reordering of reads from function arguments with stores
3781 to outgoing arguments of sibling calls. We do, however, want
3782 to record the alignment of the stack slot. */
3783 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3784 PARM_BOUNDARY. Assume the caller isn't lying. */
3785 set_mem_align (dest, align);
3786
3787 emit_move_insn (dest, x);
3788 }
3789 }
3790
3791 /* If part should go in registers, copy that part
3792 into the appropriate registers. Do this now, at the end,
3793 since mem-to-mem copies above may do function calls. */
3794 if (partial > 0 && reg != 0)
3795 {
3796 /* Handle calls that pass values in multiple non-contiguous locations.
3797 The Irix 6 ABI has examples of this. */
3798 if (GET_CODE (reg) == PARALLEL)
3799 emit_group_load (reg, x, type, -1);
3800 else
3801 {
3802 gcc_assert (partial % UNITS_PER_WORD == 0);
3803 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3804 }
3805 }
3806
3807 if (extra && args_addr == 0 && where_pad == stack_direction)
3808 anti_adjust_stack (GEN_INT (extra));
3809
3810 if (alignment_pad && args_addr == 0)
3811 anti_adjust_stack (alignment_pad);
3812 }
3813 \f
3814 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3815 operations. */
3816
3817 static rtx
3818 get_subtarget (rtx x)
3819 {
3820 return (optimize
3821 || x == 0
3822 /* Only registers can be subtargets. */
3823 || !REG_P (x)
3824 /* Don't use hard regs to avoid extending their life. */
3825 || REGNO (x) < FIRST_PSEUDO_REGISTER
3826 ? 0 : x);
3827 }
3828
3829 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3830 FIELD is a bitfield. Returns true if the optimization was successful,
3831 and there's nothing else to do. */
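/* For instance (illustrative source, not from the original comments):
   given "struct S { unsigned f : 3; unsigned g : 5; } *p;", an
   assignment such as "p->g |= 6" or "p->f ^= 1" is handled below with
   a single IOR/XOR applied to the word containing the field, instead
   of an extract/modify/insert sequence (PLUS/MINUS is only handled
   for the topmost field or for 1-bit fields, as noted below).  */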
3832
3833 static bool
3834 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3835 unsigned HOST_WIDE_INT bitpos,
3836 enum machine_mode mode1, rtx str_rtx,
3837 tree to, tree src)
3838 {
3839 enum machine_mode str_mode = GET_MODE (str_rtx);
3840 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3841 tree op0, op1;
3842 rtx value, result;
3843 optab binop;
3844
3845 if (mode1 != VOIDmode
3846 || bitsize >= BITS_PER_WORD
3847 || str_bitsize > BITS_PER_WORD
3848 || TREE_SIDE_EFFECTS (to)
3849 || TREE_THIS_VOLATILE (to))
3850 return false;
3851
3852 STRIP_NOPS (src);
3853 if (!BINARY_CLASS_P (src)
3854 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3855 return false;
3856
3857 op0 = TREE_OPERAND (src, 0);
3858 op1 = TREE_OPERAND (src, 1);
3859 STRIP_NOPS (op0);
3860
3861 if (!operand_equal_p (to, op0, 0))
3862 return false;
3863
3864 if (MEM_P (str_rtx))
3865 {
3866 unsigned HOST_WIDE_INT offset1;
3867
3868 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3869 str_mode = word_mode;
3870 str_mode = get_best_mode (bitsize, bitpos,
3871 MEM_ALIGN (str_rtx), str_mode, 0);
3872 if (str_mode == VOIDmode)
3873 return false;
3874 str_bitsize = GET_MODE_BITSIZE (str_mode);
3875
3876 offset1 = bitpos;
3877 bitpos %= str_bitsize;
3878 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3879 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3880 }
3881 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3882 return false;
3883
3884 /* If the bit field covers the whole REG/MEM, store_field
3885 will likely generate better code. */
3886 if (bitsize >= str_bitsize)
3887 return false;
3888
3889 /* We can't handle fields split across multiple entities. */
3890 if (bitpos + bitsize > str_bitsize)
3891 return false;
3892
3893 if (BYTES_BIG_ENDIAN)
3894 bitpos = str_bitsize - bitpos - bitsize;
3895
3896 switch (TREE_CODE (src))
3897 {
3898 case PLUS_EXPR:
3899 case MINUS_EXPR:
3900 /* For now, just optimize the case of the topmost bitfield,
3901 where we don't need to do any masking, and also
3902 1-bit bitfields where XOR can be used.
3903 We might win by one instruction for the other bitfields
3904 too if insv/extv instructions aren't used, so support for
3905 them can be added later. */
3906 if (bitpos + bitsize != str_bitsize
3907 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3908 break;
3909
3910 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3911 value = convert_modes (str_mode,
3912 TYPE_MODE (TREE_TYPE (op1)), value,
3913 TYPE_UNSIGNED (TREE_TYPE (op1)));
3914
3915 /* We may be accessing data outside the field, which means
3916 we can alias adjacent data. */
3917 if (MEM_P (str_rtx))
3918 {
3919 str_rtx = shallow_copy_rtx (str_rtx);
3920 set_mem_alias_set (str_rtx, 0);
3921 set_mem_expr (str_rtx, 0);
3922 }
3923
3924 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3925 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3926 {
3927 value = expand_and (str_mode, value, const1_rtx, NULL);
3928 binop = xor_optab;
3929 }
3930 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3931 build_int_cst (NULL_TREE, bitpos),
3932 NULL_RTX, 1);
3933 result = expand_binop (str_mode, binop, str_rtx,
3934 value, str_rtx, 1, OPTAB_WIDEN);
3935 if (result != str_rtx)
3936 emit_move_insn (str_rtx, result);
3937 return true;
3938
3939 case BIT_IOR_EXPR:
3940 case BIT_XOR_EXPR:
3941 if (TREE_CODE (op1) != INTEGER_CST)
3942 break;
3943 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3944 value = convert_modes (GET_MODE (str_rtx),
3945 TYPE_MODE (TREE_TYPE (op1)), value,
3946 TYPE_UNSIGNED (TREE_TYPE (op1)));
3947
3948 /* We may be accessing data outside the field, which means
3949 we can alias adjacent data. */
3950 if (MEM_P (str_rtx))
3951 {
3952 str_rtx = shallow_copy_rtx (str_rtx);
3953 set_mem_alias_set (str_rtx, 0);
3954 set_mem_expr (str_rtx, 0);
3955 }
3956
3957 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3958 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3959 {
3960 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3961 - 1);
3962 value = expand_and (GET_MODE (str_rtx), value, mask,
3963 NULL_RTX);
3964 }
3965 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3966 build_int_cst (NULL_TREE, bitpos),
3967 NULL_RTX, 1);
3968 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3969 value, str_rtx, 1, OPTAB_WIDEN);
3970 if (result != str_rtx)
3971 emit_move_insn (str_rtx, result);
3972 return true;
3973
3974 default:
3975 break;
3976 }
3977
3978 return false;
3979 }
3980
3981
3982 /* Expand an assignment that stores the value of FROM into TO. */
3983
3984 void
3985 expand_assignment (tree to, tree from)
3986 {
3987 rtx to_rtx = 0;
3988 rtx result;
3989
3990 /* Don't crash if the lhs of the assignment was erroneous. */
3991 if (TREE_CODE (to) == ERROR_MARK)
3992 {
3993 result = expand_normal (from);
3994 return;
3995 }
3996
3997 /* Optimize away no-op moves without side-effects. */
3998 if (operand_equal_p (to, from, 0))
3999 return;
4000
4001 /* Assignment of a structure component needs special treatment
4002 if the structure component's rtx is not simply a MEM.
4003 Assignment of an array element at a constant index, and assignment of
4004 an array element in an unaligned packed structure field, have the same
4005 problem. */
4006 if (handled_component_p (to)
4007 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4008 {
4009 enum machine_mode mode1;
4010 HOST_WIDE_INT bitsize, bitpos;
4011 tree offset;
4012 int unsignedp;
4013 int volatilep = 0;
4014 tree tem;
4015
4016 push_temp_slots ();
4017 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4018 &unsignedp, &volatilep, true);
4019
4020 /* If we are going to use store_bit_field and extract_bit_field,
4021 make sure to_rtx will be safe for multiple use. */
4022
4023 to_rtx = expand_normal (tem);
4024
4025 if (offset != 0)
4026 {
4027 rtx offset_rtx;
4028
4029 if (!MEM_P (to_rtx))
4030 {
4031 /* We can get constant negative offsets into arrays with broken
4032 user code. Translate this to a trap instead of ICEing. */
4033 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4034 expand_builtin_trap ();
4035 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4036 }
4037
4038 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4039 #ifdef POINTERS_EXTEND_UNSIGNED
4040 if (GET_MODE (offset_rtx) != Pmode)
4041 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4042 #else
4043 if (GET_MODE (offset_rtx) != ptr_mode)
4044 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4045 #endif
4046
4047 /* A constant address in TO_RTX can have VOIDmode; we must not try
4048 to call force_reg in that case, so avoid it. */
4049 if (MEM_P (to_rtx)
4050 && GET_MODE (to_rtx) == BLKmode
4051 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4052 && bitsize > 0
4053 && (bitpos % bitsize) == 0
4054 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4055 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4056 {
4057 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4058 bitpos = 0;
4059 }
4060
4061 to_rtx = offset_address (to_rtx, offset_rtx,
4062 highest_pow2_factor_for_target (to,
4063 offset));
4064 }
4065
4066 /* Handle expand_expr of a complex value returning a CONCAT. */
4067 if (GET_CODE (to_rtx) == CONCAT)
4068 {
4069 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4070 {
4071 gcc_assert (bitpos == 0);
4072 result = store_expr (from, to_rtx, false);
4073 }
4074 else
4075 {
4076 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4077 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4078 }
4079 }
4080 else
4081 {
4082 if (MEM_P (to_rtx))
4083 {
4084 /* If the field is at offset zero, we could have been given the
4085 DECL_RTX of the parent struct. Don't munge it. */
4086 to_rtx = shallow_copy_rtx (to_rtx);
4087
4088 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4089
4090 /* Deal with volatile and readonly fields. The former is only
4091 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4092 if (volatilep)
4093 MEM_VOLATILE_P (to_rtx) = 1;
4094 if (component_uses_parent_alias_set (to))
4095 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4096 }
4097
4098 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4099 to_rtx, to, from))
4100 result = NULL;
4101 else
4102 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4103 TREE_TYPE (tem), get_alias_set (to));
4104 }
4105
4106 if (result)
4107 preserve_temp_slots (result);
4108 free_temp_slots ();
4109 pop_temp_slots ();
4110 return;
4111 }
4112
4113 /* If the rhs is a function call and its value is not an aggregate,
4114 call the function before we start to compute the lhs.
4115 This is needed for correct code for cases such as
4116 val = setjmp (buf) on machines where reference to val
4117 requires loading up part of an address in a separate insn.
4118
4119 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4120 since it might be a promoted variable where the zero- or sign-extension
4121 needs to be done. Handling this in the normal way is safe because no
4122 computation is done before the call. */
4123 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4124 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4125 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4126 && REG_P (DECL_RTL (to))))
4127 {
4128 rtx value;
4129
4130 push_temp_slots ();
4131 value = expand_normal (from);
4132 if (to_rtx == 0)
4133 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4134
4135 /* Handle calls that return values in multiple non-contiguous locations.
4136 The Irix 6 ABI has examples of this. */
4137 if (GET_CODE (to_rtx) == PARALLEL)
4138 emit_group_load (to_rtx, value, TREE_TYPE (from),
4139 int_size_in_bytes (TREE_TYPE (from)));
4140 else if (GET_MODE (to_rtx) == BLKmode)
4141 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4142 else
4143 {
4144 if (POINTER_TYPE_P (TREE_TYPE (to)))
4145 value = convert_memory_address (GET_MODE (to_rtx), value);
4146 emit_move_insn (to_rtx, value);
4147 }
4148 preserve_temp_slots (to_rtx);
4149 free_temp_slots ();
4150 pop_temp_slots ();
4151 return;
4152 }
4153
4154 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4155 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4156
4157 if (to_rtx == 0)
4158 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4159
4160 /* Don't move directly into a return register. */
4161 if (TREE_CODE (to) == RESULT_DECL
4162 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4163 {
4164 rtx temp;
4165
4166 push_temp_slots ();
4167 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4168
4169 if (GET_CODE (to_rtx) == PARALLEL)
4170 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4171 int_size_in_bytes (TREE_TYPE (from)));
4172 else
4173 emit_move_insn (to_rtx, temp);
4174
4175 preserve_temp_slots (to_rtx);
4176 free_temp_slots ();
4177 pop_temp_slots ();
4178 return;
4179 }
4180
4181 /* In case we are returning the contents of an object which overlaps
4182 the place the value is being stored, use a safe function when copying
4183 a value through a pointer into a structure value return block. */
4184 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4185 && current_function_returns_struct
4186 && !current_function_returns_pcc_struct)
4187 {
4188 rtx from_rtx, size;
4189
4190 push_temp_slots ();
4191 size = expr_size (from);
4192 from_rtx = expand_normal (from);
4193
4194 emit_library_call (memmove_libfunc, LCT_NORMAL,
4195 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4196 XEXP (from_rtx, 0), Pmode,
4197 convert_to_mode (TYPE_MODE (sizetype),
4198 size, TYPE_UNSIGNED (sizetype)),
4199 TYPE_MODE (sizetype));
4200
4201 preserve_temp_slots (to_rtx);
4202 free_temp_slots ();
4203 pop_temp_slots ();
4204 return;
4205 }
4206
4207 /* Compute FROM and store the value in the rtx we got. */
4208
4209 push_temp_slots ();
4210 result = store_expr (from, to_rtx, 0);
4211 preserve_temp_slots (result);
4212 free_temp_slots ();
4213 pop_temp_slots ();
4214 return;
4215 }
4216
4217 /* Generate code for computing expression EXP,
4218 and storing the value into TARGET.
4219
4220 If the mode is BLKmode then we may return TARGET itself.
4221 It turns out that in BLKmode this doesn't cause a problem,
4222 because C has no operators that could combine two different
4223 assignments into the same BLKmode object with different values
4224 with no sequence point. Will other languages need this to
4225 be more thorough?
4226
4227 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4228 stack, and block moves may need to be treated specially. */
4229
4230 rtx
4231 store_expr (tree exp, rtx target, int call_param_p)
4232 {
4233 rtx temp;
4234 rtx alt_rtl = NULL_RTX;
4235 int dont_return_target = 0;
4236
4237 if (VOID_TYPE_P (TREE_TYPE (exp)))
4238 {
4239 /* C++ can generate ?: expressions with a throw expression in one
4240 branch and an rvalue in the other. Here, we resolve attempts to
4241 store the throw expression's nonexistent result. */
4242 gcc_assert (!call_param_p);
4243 expand_expr (exp, const0_rtx, VOIDmode, 0);
4244 return NULL_RTX;
4245 }
4246 if (TREE_CODE (exp) == COMPOUND_EXPR)
4247 {
4248 /* Perform first part of compound expression, then assign from second
4249 part. */
4250 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4251 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4252 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4253 }
4254 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4255 {
4256 /* For conditional expression, get safe form of the target. Then
4257 test the condition, doing the appropriate assignment on either
4258 side. This avoids the creation of unnecessary temporaries.
4259 For non-BLKmode, it is more efficient not to do this. */
4260
4261 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4262
4263 do_pending_stack_adjust ();
4264 NO_DEFER_POP;
4265 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4266 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4267 emit_jump_insn (gen_jump (lab2));
4268 emit_barrier ();
4269 emit_label (lab1);
4270 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4271 emit_label (lab2);
4272 OK_DEFER_POP;
4273
4274 return NULL_RTX;
4275 }
4276 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4277 /* If this is a scalar in a register that is stored in a wider mode
4278 than the declared mode, compute the result into its declared mode
4279 and then convert to the wider mode. Our value is the computed
4280 expression. */
4281 {
4282 rtx inner_target = 0;
4283
4284 /* We can do the conversion inside EXP, which will often result
4285 in some optimizations. Do the conversion in two steps: first
4286 change the signedness, if needed, then do the extension. But don't
4287 do this if the type of EXP is a subtype of something else
4288 since then the conversion might involve more than just
4289 converting modes. */
4290 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4291 && TREE_TYPE (TREE_TYPE (exp)) == 0
4292 && (!lang_hooks.reduce_bit_field_operations
4293 || (GET_MODE_PRECISION (GET_MODE (target))
4294 == TYPE_PRECISION (TREE_TYPE (exp)))))
4295 {
4296 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4297 != SUBREG_PROMOTED_UNSIGNED_P (target))
4298 exp = fold_convert
4299 (lang_hooks.types.signed_or_unsigned_type
4300 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4301
4302 exp = fold_convert (lang_hooks.types.type_for_mode
4303 (GET_MODE (SUBREG_REG (target)),
4304 SUBREG_PROMOTED_UNSIGNED_P (target)),
4305 exp);
4306
4307 inner_target = SUBREG_REG (target);
4308 }
4309
4310 temp = expand_expr (exp, inner_target, VOIDmode,
4311 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4312
4313 /* If TEMP is a VOIDmode constant, use convert_modes to make
4314 sure that we properly convert it. */
4315 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4316 {
4317 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4318 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4319 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4320 GET_MODE (target), temp,
4321 SUBREG_PROMOTED_UNSIGNED_P (target));
4322 }
4323
4324 convert_move (SUBREG_REG (target), temp,
4325 SUBREG_PROMOTED_UNSIGNED_P (target));
4326
4327 return NULL_RTX;
4328 }
4329 else
4330 {
4331 temp = expand_expr_real (exp, target, GET_MODE (target),
4332 (call_param_p
4333 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4334 &alt_rtl);
4335 /* Return TARGET if it's a specified hardware register.
4336 If TARGET is a volatile mem ref, either return TARGET
4337 or return a reg copied *from* TARGET; ANSI requires this.
4338
4339 Otherwise, if TEMP is not TARGET, return TEMP
4340 if it is constant (for efficiency),
4341 or if we really want the correct value. */
4342 if (!(target && REG_P (target)
4343 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4344 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4345 && ! rtx_equal_p (temp, target)
4346 && CONSTANT_P (temp))
4347 dont_return_target = 1;
4348 }
4349
4350 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4351 the same as that of TARGET, adjust the constant. This is needed, for
4352 example, in case it is a CONST_DOUBLE and we want only a word-sized
4353 value. */
4354 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4355 && TREE_CODE (exp) != ERROR_MARK
4356 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4358 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4359
4360 /* If value was not generated in the target, store it there.
4361 Convert the value to TARGET's type first if necessary and emit the
4362 pending incrementations that have been queued when expanding EXP.
4363 Note that we cannot emit the whole queue blindly because this will
4364 effectively disable the POST_INC optimization later.
4365
4366 If TEMP and TARGET compare equal according to rtx_equal_p, but
4367 one or both of them are volatile memory refs, we have to distinguish
4368 two cases:
4369 - expand_expr has used TARGET. In this case, we must not generate
4370 another copy. This can be detected by TARGET being equal according
4371 to == .
4372 - expand_expr has not used TARGET - that means that the source just
4373 happens to have the same RTX form. Since temp will have been created
4374 by expand_expr, it will compare unequal according to == .
4375 We must generate a copy in this case, to reach the correct number
4376 of volatile memory references. */
4377
4378 if ((! rtx_equal_p (temp, target)
4379 || (temp != target && (side_effects_p (temp)
4380 || side_effects_p (target))))
4381 && TREE_CODE (exp) != ERROR_MARK
4382 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4383 but TARGET is not a valid memory reference, TEMP will differ
4384 from TARGET although it is really the same location. */
4385 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4386 /* If there's nothing to copy, don't bother. Don't call
4387 expr_size unless necessary, because for some front ends (e.g. C++)
4388 the expr_size hook must not be given objects that are not
4389 supposed to be bit-copied or bit-initialized. */
4390 && expr_size (exp) != const0_rtx)
4391 {
4392 if (GET_MODE (temp) != GET_MODE (target)
4393 && GET_MODE (temp) != VOIDmode)
4394 {
4395 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4396 if (dont_return_target)
4397 {
4398 /* In this case, we will return TEMP,
4399 so make sure it has the proper mode.
4400 But don't forget to store the value into TARGET. */
4401 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4402 emit_move_insn (target, temp);
4403 }
4404 else
4405 convert_move (target, temp, unsignedp);
4406 }
4407
4408 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4409 {
4410 /* Handle copying a string constant into an array. The string
4411 constant may be shorter than the array. So copy just the string's
4412 actual length, and clear the rest. First get the size of the data
4413 type of the string, which is actually the size of the target. */
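/* E.g. (illustrative) for  char buf[16] = "hi";  the STRING_CST
   supplies 3 bytes (including the terminating NUL); they are copied
   below and the clear_storage call further down zeroes the remaining
   13 bytes of the array.  */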
4414 rtx size = expr_size (exp);
4415
4416 if (GET_CODE (size) == CONST_INT
4417 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4418 emit_block_move (target, temp, size,
4419 (call_param_p
4420 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4421 else
4422 {
4423 /* Compute the size of the data to copy from the string. */
4424 tree copy_size
4425 = size_binop (MIN_EXPR,
4426 make_tree (sizetype, size),
4427 size_int (TREE_STRING_LENGTH (exp)));
4428 rtx copy_size_rtx
4429 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4430 (call_param_p
4431 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4432 rtx label = 0;
4433
4434 /* Copy that much. */
4435 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4436 TYPE_UNSIGNED (sizetype));
4437 emit_block_move (target, temp, copy_size_rtx,
4438 (call_param_p
4439 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4440
4441 /* Figure out how much is left in TARGET that we have to clear.
4442 Do all calculations in ptr_mode. */
4443 if (GET_CODE (copy_size_rtx) == CONST_INT)
4444 {
4445 size = plus_constant (size, -INTVAL (copy_size_rtx));
4446 target = adjust_address (target, BLKmode,
4447 INTVAL (copy_size_rtx));
4448 }
4449 else
4450 {
4451 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4452 copy_size_rtx, NULL_RTX, 0,
4453 OPTAB_LIB_WIDEN);
4454
4455 #ifdef POINTERS_EXTEND_UNSIGNED
4456 if (GET_MODE (copy_size_rtx) != Pmode)
4457 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4458 TYPE_UNSIGNED (sizetype));
4459 #endif
4460
4461 target = offset_address (target, copy_size_rtx,
4462 highest_pow2_factor (copy_size));
4463 label = gen_label_rtx ();
4464 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4465 GET_MODE (size), 0, label);
4466 }
4467
4468 if (size != const0_rtx)
4469 clear_storage (target, size, BLOCK_OP_NORMAL);
4470
4471 if (label)
4472 emit_label (label);
4473 }
4474 }
4475 /* Handle calls that return values in multiple non-contiguous locations.
4476 The Irix 6 ABI has examples of this. */
4477 else if (GET_CODE (target) == PARALLEL)
4478 emit_group_load (target, temp, TREE_TYPE (exp),
4479 int_size_in_bytes (TREE_TYPE (exp)));
4480 else if (GET_MODE (temp) == BLKmode)
4481 emit_block_move (target, temp, expr_size (exp),
4482 (call_param_p
4483 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4484 else
4485 {
4486 temp = force_operand (temp, target);
4487 if (temp != target)
4488 emit_move_insn (target, temp);
4489 }
4490 }
4491
4492 return NULL_RTX;
4493 }
4494 \f
4495 /* Examine CTOR to discover:
4496 * how many scalar fields are set to nonzero values,
4497 and place it in *P_NZ_ELTS;
4498 * how many scalar fields are set to non-constant values,
4499 and place it in *P_NC_ELTS;
4500 * how many scalar fields in total are in CTOR,
4501 and place it in *P_ELT_COUNT; and
4502 * if the type is a union and the initializer from the constructor
4503 is not the largest element in the union, set *P_MUST_CLEAR. */
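/* For instance (illustrative), the constructor { 1, 0, x, 0 } with X
   not a constant yields *P_NZ_ELTS == 2 (the 1 and the X),
   *P_NC_ELTS == 1 (the X) and *P_ELT_COUNT == 4.  */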
4504
4505 static void
4506 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4507 HOST_WIDE_INT *p_nc_elts,
4508 HOST_WIDE_INT *p_elt_count,
4509 bool *p_must_clear)
4510 {
4511 unsigned HOST_WIDE_INT idx;
4512 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4513 tree value, purpose;
4514
4515 nz_elts = 0;
4516 nc_elts = 0;
4517 elt_count = 0;
4518
4519 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4520 {
4521 HOST_WIDE_INT mult;
4522
4523 mult = 1;
4524 if (TREE_CODE (purpose) == RANGE_EXPR)
4525 {
4526 tree lo_index = TREE_OPERAND (purpose, 0);
4527 tree hi_index = TREE_OPERAND (purpose, 1);
4528
4529 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4530 mult = (tree_low_cst (hi_index, 1)
4531 - tree_low_cst (lo_index, 1) + 1);
4532 }
4533
4534 switch (TREE_CODE (value))
4535 {
4536 case CONSTRUCTOR:
4537 {
4538 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4539 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4540 nz_elts += mult * nz;
4541 nc_elts += mult * nc;
4542 elt_count += mult * ic;
4543 }
4544 break;
4545
4546 case INTEGER_CST:
4547 case REAL_CST:
4548 if (!initializer_zerop (value))
4549 nz_elts += mult;
4550 elt_count += mult;
4551 break;
4552
4553 case STRING_CST:
4554 nz_elts += mult * TREE_STRING_LENGTH (value);
4555 elt_count += mult * TREE_STRING_LENGTH (value);
4556 break;
4557
4558 case COMPLEX_CST:
4559 if (!initializer_zerop (TREE_REALPART (value)))
4560 nz_elts += mult;
4561 if (!initializer_zerop (TREE_IMAGPART (value)))
4562 nz_elts += mult;
4563 elt_count += mult;
4564 break;
4565
4566 case VECTOR_CST:
4567 {
4568 tree v;
4569 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4570 {
4571 if (!initializer_zerop (TREE_VALUE (v)))
4572 nz_elts += mult;
4573 elt_count += mult;
4574 }
4575 }
4576 break;
4577
4578 default:
4579 nz_elts += mult;
4580 elt_count += mult;
4581 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4582 nc_elts += mult;
4583 break;
4584 }
4585 }
4586
4587 if (!*p_must_clear
4588 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4589 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4590 {
4591 tree init_sub_type;
4592 bool clear_this = true;
4593
4594 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4595 {
4596 /* We don't expect more than one element of the union to be
4597 initialized. Not sure what we should do otherwise... */
4598 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4599 == 1);
4600
4601 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4602 CONSTRUCTOR_ELTS (ctor),
4603 0)->value);
4604
4605 /* ??? We could look at each element of the union, and find the
4606 largest element. Which would avoid comparing the size of the
4607 initialized element against any tail padding in the union.
4608 Doesn't seem worth the effort... */
4609 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4610 TYPE_SIZE (init_sub_type)) == 1)
4611 {
4612 /* And now we have to find out if the element itself is fully
4613 constructed. E.g. for union { struct { int a, b; } s; } u
4614 = { .s = { .a = 1 } }. */
4615 if (elt_count == count_type_elements (init_sub_type, false))
4616 clear_this = false;
4617 }
4618 }
4619
4620 *p_must_clear = clear_this;
4621 }
4622
4623 *p_nz_elts += nz_elts;
4624 *p_nc_elts += nc_elts;
4625 *p_elt_count += elt_count;
4626 }
4627
4628 void
4629 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4630 HOST_WIDE_INT *p_nc_elts,
4631 HOST_WIDE_INT *p_elt_count,
4632 bool *p_must_clear)
4633 {
4634 *p_nz_elts = 0;
4635 *p_nc_elts = 0;
4636 *p_elt_count = 0;
4637 *p_must_clear = false;
4638 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4639 p_must_clear);
4640 }
4641
4642 /* Count the number of scalars in TYPE. Return -1 on overflow or
4643 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4644 flexible array member at the end of the structure. */
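/* E.g. (illustrative) "struct { int a; short b[3]; _Complex double c; }"
   counts as 1 + 3 + 2 == 6 scalars.  */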
4645
4646 HOST_WIDE_INT
4647 count_type_elements (tree type, bool allow_flexarr)
4648 {
4649 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4650 switch (TREE_CODE (type))
4651 {
4652 case ARRAY_TYPE:
4653 {
4654 tree telts = array_type_nelts (type);
4655 if (telts && host_integerp (telts, 1))
4656 {
4657 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4658 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4659 if (n == 0)
4660 return 0;
4661 else if (max / n > m)
4662 return n * m;
4663 }
4664 return -1;
4665 }
4666
4667 case RECORD_TYPE:
4668 {
4669 HOST_WIDE_INT n = 0, t;
4670 tree f;
4671
4672 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4673 if (TREE_CODE (f) == FIELD_DECL)
4674 {
4675 t = count_type_elements (TREE_TYPE (f), false);
4676 if (t < 0)
4677 {
4678 /* Check for structures with flexible array member. */
4679 tree tf = TREE_TYPE (f);
4680 if (allow_flexarr
4681 && TREE_CHAIN (f) == NULL
4682 && TREE_CODE (tf) == ARRAY_TYPE
4683 && TYPE_DOMAIN (tf)
4684 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4685 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4686 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4687 && int_size_in_bytes (type) >= 0)
4688 break;
4689
4690 return -1;
4691 }
4692 n += t;
4693 }
4694
4695 return n;
4696 }
4697
4698 case UNION_TYPE:
4699 case QUAL_UNION_TYPE:
4700 {
4701 /* Ho hum. How in the world do we guess here? Clearly it isn't
4702 right to count the fields. Guess based on the number of words. */
4703 HOST_WIDE_INT n = int_size_in_bytes (type);
4704 if (n < 0)
4705 return -1;
4706 return n / UNITS_PER_WORD;
4707 }
4708
4709 case COMPLEX_TYPE:
4710 return 2;
4711
4712 case VECTOR_TYPE:
4713 return TYPE_VECTOR_SUBPARTS (type);
4714
4715 case INTEGER_TYPE:
4716 case REAL_TYPE:
4717 case ENUMERAL_TYPE:
4718 case BOOLEAN_TYPE:
4719 case POINTER_TYPE:
4720 case OFFSET_TYPE:
4721 case REFERENCE_TYPE:
4722 return 1;
4723
4724 case VOID_TYPE:
4725 case METHOD_TYPE:
4726 case FUNCTION_TYPE:
4727 case LANG_TYPE:
4728 default:
4729 gcc_unreachable ();
4730 }
4731 }
4732
4733 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
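/* (Illustrative: an 8-element CONSTRUCTOR with one nonzero element
   satisfies nz_elts < elts / 4 below; a 4-element one with one
   nonzero element does not, since 1 < 4 / 4 is false.)  */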
4734
4735 static int
4736 mostly_zeros_p (tree exp)
4737 {
4738 if (TREE_CODE (exp) == CONSTRUCTOR)
4739
4740 {
4741 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4742 bool must_clear;
4743
4744 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4745 if (must_clear)
4746 return 1;
4747
4748 elts = count_type_elements (TREE_TYPE (exp), false);
4749
4750 return nz_elts < elts / 4;
4751 }
4752
4753 return initializer_zerop (exp);
4754 }
4755
4756 /* Return 1 if EXP contains all zeros. */
4757
4758 static int
4759 all_zeros_p (tree exp)
4760 {
4761 if (TREE_CODE (exp) == CONSTRUCTOR)
4762
4763 {
4764 HOST_WIDE_INT nz_elts, nc_elts, count;
4765 bool must_clear;
4766
4767 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4768 return nz_elts == 0;
4769 }
4770
4771 return initializer_zerop (exp);
4772 }
4773 \f
4774 /* Helper function for store_constructor.
4775 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4776 TYPE is the type of the CONSTRUCTOR, not the element type.
4777 CLEARED is as for store_constructor.
4778 ALIAS_SET is the alias set to use for any stores.
4779
4780 This provides a recursive shortcut back to store_constructor when it isn't
4781 necessary to go through store_field. This is so that we can pass through
4782 the cleared field to let store_constructor know that we may not have to
4783 clear a substructure if the outer structure has already been cleared. */
4784
4785 static void
4786 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4787 HOST_WIDE_INT bitpos, enum machine_mode mode,
4788 tree exp, tree type, int cleared, int alias_set)
4789 {
4790 if (TREE_CODE (exp) == CONSTRUCTOR
4791 /* We can only call store_constructor recursively if the size and
4792 bit position are on a byte boundary. */
4793 && bitpos % BITS_PER_UNIT == 0
4794 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4795 /* If we have a nonzero bitpos for a register target, then we just
4796 let store_field do the bitfield handling. This is unlikely to
4797 generate unnecessary clear instructions anyway. */
4798 && (bitpos == 0 || MEM_P (target)))
4799 {
4800 if (MEM_P (target))
4801 target
4802 = adjust_address (target,
4803 GET_MODE (target) == BLKmode
4804 || 0 != (bitpos
4805 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4806 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4807
4808
4809 /* Update the alias set, if required. */
4810 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4811 && MEM_ALIAS_SET (target) != 0)
4812 {
4813 target = copy_rtx (target);
4814 set_mem_alias_set (target, alias_set);
4815 }
4816
4817 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4818 }
4819 else
4820 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4821 }
4822
4823 /* Store the value of constructor EXP into the rtx TARGET.
4824 TARGET is either a REG or a MEM; we know it cannot conflict, since
4825 safe_from_p has been called.
4826 CLEARED is true if TARGET is known to have been zero'd.
4827 SIZE is the number of bytes of TARGET we are allowed to modify: this
4828 may not be the same as the size of EXP if we are assigning to a field
4829 which has been packed to exclude padding bits. */
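/* For example (illustrative), "int v[8] = { [2] = 5 };" has missing
   elements, so the ARRAY_TYPE case below clears the whole array first
   and then stores only the single nonzero element.  */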
4830
4831 static void
4832 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4833 {
4834 tree type = TREE_TYPE (exp);
4835 #ifdef WORD_REGISTER_OPERATIONS
4836 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4837 #endif
4838
4839 switch (TREE_CODE (type))
4840 {
4841 case RECORD_TYPE:
4842 case UNION_TYPE:
4843 case QUAL_UNION_TYPE:
4844 {
4845 unsigned HOST_WIDE_INT idx;
4846 tree field, value;
4847
4848 /* If size is zero or the target is already cleared, do nothing. */
4849 if (size == 0 || cleared)
4850 cleared = 1;
4851 /* We either clear the aggregate or indicate the value is dead. */
4852 else if ((TREE_CODE (type) == UNION_TYPE
4853 || TREE_CODE (type) == QUAL_UNION_TYPE)
4854 && ! CONSTRUCTOR_ELTS (exp))
4855 /* If the constructor is empty, clear the union. */
4856 {
4857 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4858 cleared = 1;
4859 }
4860
4861 /* If we are building a static constructor into a register,
4862 set the initial value as zero so we can fold the value into
4863 a constant. But if more than one register is involved,
4864 this probably loses. */
4865 else if (REG_P (target) && TREE_STATIC (exp)
4866 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4867 {
4868 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4869 cleared = 1;
4870 }
4871
4872 /* If the constructor has fewer fields than the structure or
4873 if we are initializing the structure to mostly zeros, clear
4874 the whole structure first. Don't do this if TARGET is a
4875 register whose mode size isn't equal to SIZE since
4876 clear_storage can't handle this case. */
4877 else if (size > 0
4878 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4879 != fields_length (type))
4880 || mostly_zeros_p (exp))
4881 && (!REG_P (target)
4882 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4883 == size)))
4884 {
4885 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4886 cleared = 1;
4887 }
4888
4889 if (! cleared)
4890 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4891
4892 /* Store each element of the constructor into the
4893 corresponding field of TARGET. */
4894 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4895 {
4896 enum machine_mode mode;
4897 HOST_WIDE_INT bitsize;
4898 HOST_WIDE_INT bitpos = 0;
4899 tree offset;
4900 rtx to_rtx = target;
4901
4902 /* Just ignore missing fields. We cleared the whole
4903 structure, above, if any fields are missing. */
4904 if (field == 0)
4905 continue;
4906
4907 if (cleared && initializer_zerop (value))
4908 continue;
4909
4910 if (host_integerp (DECL_SIZE (field), 1))
4911 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4912 else
4913 bitsize = -1;
4914
4915 mode = DECL_MODE (field);
4916 if (DECL_BIT_FIELD (field))
4917 mode = VOIDmode;
4918
4919 offset = DECL_FIELD_OFFSET (field);
4920 if (host_integerp (offset, 0)
4921 && host_integerp (bit_position (field), 0))
4922 {
4923 bitpos = int_bit_position (field);
4924 offset = 0;
4925 }
4926 else
4927 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4928
4929 if (offset)
4930 {
4931 rtx offset_rtx;
4932
4933 offset
4934 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4935 make_tree (TREE_TYPE (exp),
4936 target));
4937
4938 offset_rtx = expand_normal (offset);
4939 gcc_assert (MEM_P (to_rtx));
4940
4941 #ifdef POINTERS_EXTEND_UNSIGNED
4942 if (GET_MODE (offset_rtx) != Pmode)
4943 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4944 #else
4945 if (GET_MODE (offset_rtx) != ptr_mode)
4946 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4947 #endif
4948
4949 to_rtx = offset_address (to_rtx, offset_rtx,
4950 highest_pow2_factor (offset));
4951 }
4952
4953 #ifdef WORD_REGISTER_OPERATIONS
4954 /* If this initializes a field that is smaller than a
4955 word, at the start of a word, try to widen it to a full
4956 word. This special case allows us to output C++ member
4957 function initializations in a form that the optimizers
4958 can understand. */
4959 if (REG_P (target)
4960 && bitsize < BITS_PER_WORD
4961 && bitpos % BITS_PER_WORD == 0
4962 && GET_MODE_CLASS (mode) == MODE_INT
4963 && TREE_CODE (value) == INTEGER_CST
4964 && exp_size >= 0
4965 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4966 {
4967 tree type = TREE_TYPE (value);
4968
4969 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4970 {
4971 type = lang_hooks.types.type_for_size
4972 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4973 value = fold_convert (type, value);
4974 }
4975
4976 if (BYTES_BIG_ENDIAN)
4977 value
4978 = fold_build2 (LSHIFT_EXPR, type, value,
4979 build_int_cst (type,
4980 BITS_PER_WORD - bitsize));
4981 bitsize = BITS_PER_WORD;
4982 mode = word_mode;
4983 }
4984 #endif
4985
4986 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4987 && DECL_NONADDRESSABLE_P (field))
4988 {
4989 to_rtx = copy_rtx (to_rtx);
4990 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4991 }
4992
4993 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4994 value, type, cleared,
4995 get_alias_set (TREE_TYPE (field)));
4996 }
4997 break;
4998 }
4999 case ARRAY_TYPE:
5000 {
5001 tree value, index;
5002 unsigned HOST_WIDE_INT i;
5003 int need_to_clear;
5004 tree domain;
5005 tree elttype = TREE_TYPE (type);
5006 int const_bounds_p;
5007 HOST_WIDE_INT minelt = 0;
5008 HOST_WIDE_INT maxelt = 0;
5009
5010 domain = TYPE_DOMAIN (type);
5011 const_bounds_p = (TYPE_MIN_VALUE (domain)
5012 && TYPE_MAX_VALUE (domain)
5013 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5014 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5015
5016 /* If we have constant bounds for the range of the type, get them. */
5017 if (const_bounds_p)
5018 {
5019 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5020 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5021 }
5022
5023 /* If the constructor has fewer elements than the array, clear
5024 the whole array first. Similarly if this is a static
5025 constructor of a non-BLKmode object. */
5026 if (cleared)
5027 need_to_clear = 0;
5028 else if (REG_P (target) && TREE_STATIC (exp))
5029 need_to_clear = 1;
5030 else
5031 {
5032 unsigned HOST_WIDE_INT idx;
5033 tree index, value;
5034 HOST_WIDE_INT count = 0, zero_count = 0;
5035 need_to_clear = ! const_bounds_p;
5036
5037 /* This loop is a more accurate version of the loop in
5038 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5039 is also needed to check for missing elements. */
5040 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5041 {
5042 HOST_WIDE_INT this_node_count;
5043
5044 if (need_to_clear)
5045 break;
5046
5047 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5048 {
5049 tree lo_index = TREE_OPERAND (index, 0);
5050 tree hi_index = TREE_OPERAND (index, 1);
5051
5052 if (! host_integerp (lo_index, 1)
5053 || ! host_integerp (hi_index, 1))
5054 {
5055 need_to_clear = 1;
5056 break;
5057 }
5058
5059 this_node_count = (tree_low_cst (hi_index, 1)
5060 - tree_low_cst (lo_index, 1) + 1);
5061 }
5062 else
5063 this_node_count = 1;
5064
5065 count += this_node_count;
5066 if (mostly_zeros_p (value))
5067 zero_count += this_node_count;
5068 }
5069
5070 /* Clear the entire array first if there are any missing
5071 elements, or if the incidence of zero elements is >=
5072 75%. */
5073 if (! need_to_clear
5074 && (count < maxelt - minelt + 1
5075 || 4 * zero_count >= 3 * count))
5076 need_to_clear = 1;
5077 }
5078
5079 if (need_to_clear && size > 0)
5080 {
5081 if (REG_P (target))
5082 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5083 else
5084 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5085 cleared = 1;
5086 }
5087
5088 if (!cleared && REG_P (target))
5089 /* Inform later passes that the old value is dead. */
5090 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5091
5092 /* Store each element of the constructor into the
5093 corresponding element of TARGET, determined by counting the
5094 elements. */
5095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5096 {
5097 enum machine_mode mode;
5098 HOST_WIDE_INT bitsize;
5099 HOST_WIDE_INT bitpos;
5100 int unsignedp;
5101 rtx xtarget = target;
5102
5103 if (cleared && initializer_zerop (value))
5104 continue;
5105
5106 unsignedp = TYPE_UNSIGNED (elttype);
5107 mode = TYPE_MODE (elttype);
5108 if (mode == BLKmode)
5109 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5110 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5111 : -1);
5112 else
5113 bitsize = GET_MODE_BITSIZE (mode);
5114
5115 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5116 {
5117 tree lo_index = TREE_OPERAND (index, 0);
5118 tree hi_index = TREE_OPERAND (index, 1);
5119 rtx index_r, pos_rtx;
5120 HOST_WIDE_INT lo, hi, count;
5121 tree position;
5122
5123 /* If the range is constant and "small", unroll the loop. */
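/* (Illustrative: a GNU range initializer such as
   "int a[16] = { [4 ... 7] = 9 };" reaches this point with
   COUNT == 4 and is unrolled into four element stores; larger or
   non-constant ranges fall through to the runtime loop that
   follows.)  */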
5124 if (const_bounds_p
5125 && host_integerp (lo_index, 0)
5126 && host_integerp (hi_index, 0)
5127 && (lo = tree_low_cst (lo_index, 0),
5128 hi = tree_low_cst (hi_index, 0),
5129 count = hi - lo + 1,
5130 (!MEM_P (target)
5131 || count <= 2
5132 || (host_integerp (TYPE_SIZE (elttype), 1)
5133 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5134 <= 40 * 8)))))
5135 {
5136 lo -= minelt; hi -= minelt;
5137 for (; lo <= hi; lo++)
5138 {
5139 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5140
5141 if (MEM_P (target)
5142 && !MEM_KEEP_ALIAS_SET_P (target)
5143 && TREE_CODE (type) == ARRAY_TYPE
5144 && TYPE_NONALIASED_COMPONENT (type))
5145 {
5146 target = copy_rtx (target);
5147 MEM_KEEP_ALIAS_SET_P (target) = 1;
5148 }
5149
5150 store_constructor_field
5151 (target, bitsize, bitpos, mode, value, type, cleared,
5152 get_alias_set (elttype));
5153 }
5154 }
5155 else
5156 {
5157 rtx loop_start = gen_label_rtx ();
5158 rtx loop_end = gen_label_rtx ();
5159 tree exit_cond;
5160
5161 expand_normal (hi_index);
5162 unsignedp = TYPE_UNSIGNED (domain);
5163
5164 index = build_decl (VAR_DECL, NULL_TREE, domain);
5165
5166 index_r
5167 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5168 &unsignedp, 0));
5169 SET_DECL_RTL (index, index_r);
5170 store_expr (lo_index, index_r, 0);
5171
5172 /* Build the head of the loop. */
5173 do_pending_stack_adjust ();
5174 emit_label (loop_start);
5175
5176 /* Assign value to element index. */
5177 position =
5178 fold_convert (ssizetype,
5179 fold_build2 (MINUS_EXPR,
5180 TREE_TYPE (index),
5181 index,
5182 TYPE_MIN_VALUE (domain)));
5183
5184 position =
5185 size_binop (MULT_EXPR, position,
5186 fold_convert (ssizetype,
5187 TYPE_SIZE_UNIT (elttype)));
5188
5189 pos_rtx = expand_normal (position);
5190 xtarget = offset_address (target, pos_rtx,
5191 highest_pow2_factor (position));
5192 xtarget = adjust_address (xtarget, mode, 0);
5193 if (TREE_CODE (value) == CONSTRUCTOR)
5194 store_constructor (value, xtarget, cleared,
5195 bitsize / BITS_PER_UNIT);
5196 else
5197 store_expr (value, xtarget, 0);
5198
5199 /* Generate a conditional jump to exit the loop. */
5200 exit_cond = build2 (LT_EXPR, integer_type_node,
5201 index, hi_index);
5202 jumpif (exit_cond, loop_end);
5203
5204 /* Update the loop counter, and jump to the head of
5205 the loop. */
5206 expand_assignment (index,
5207 build2 (PLUS_EXPR, TREE_TYPE (index),
5208 index, integer_one_node));
5209
5210 emit_jump (loop_start);
5211
5212 /* Build the end of the loop. */
5213 emit_label (loop_end);
5214 }
5215 }
5216 else if ((index != 0 && ! host_integerp (index, 0))
5217 || ! host_integerp (TYPE_SIZE (elttype), 1))
5218 {
5219 tree position;
5220
5221 if (index == 0)
5222 index = ssize_int (1);
5223
5224 if (minelt)
5225 index = fold_convert (ssizetype,
5226 fold_build2 (MINUS_EXPR,
5227 TREE_TYPE (index),
5228 index,
5229 TYPE_MIN_VALUE (domain)));
5230
5231 position =
5232 size_binop (MULT_EXPR, index,
5233 fold_convert (ssizetype,
5234 TYPE_SIZE_UNIT (elttype)));
5235 xtarget = offset_address (target,
5236 expand_normal (position),
5237 highest_pow2_factor (position));
5238 xtarget = adjust_address (xtarget, mode, 0);
5239 store_expr (value, xtarget, 0);
5240 }
5241 else
5242 {
5243 if (index != 0)
5244 bitpos = ((tree_low_cst (index, 0) - minelt)
5245 * tree_low_cst (TYPE_SIZE (elttype), 1));
5246 else
5247 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5248
5249 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5250 && TREE_CODE (type) == ARRAY_TYPE
5251 && TYPE_NONALIASED_COMPONENT (type))
5252 {
5253 target = copy_rtx (target);
5254 MEM_KEEP_ALIAS_SET_P (target) = 1;
5255 }
5256 store_constructor_field (target, bitsize, bitpos, mode, value,
5257 type, cleared, get_alias_set (elttype));
5258 }
5259 }
5260 break;
5261 }
5262
5263 case VECTOR_TYPE:
5264 {
5265 unsigned HOST_WIDE_INT idx;
5266 constructor_elt *ce;
5267 int i;
5268 int need_to_clear;
5269 int icode = 0;
5270 tree elttype = TREE_TYPE (type);
5271 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5272 enum machine_mode eltmode = TYPE_MODE (elttype);
5273 HOST_WIDE_INT bitsize;
5274 HOST_WIDE_INT bitpos;
5275 rtvec vector = NULL;
5276 unsigned n_elts;
5277
5278 gcc_assert (eltmode != BLKmode);
5279
5280 n_elts = TYPE_VECTOR_SUBPARTS (type);
5281 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5282 {
5283 enum machine_mode mode = GET_MODE (target);
5284
5285 icode = (int) vec_init_optab->handlers[mode].insn_code;
5286 if (icode != CODE_FOR_nothing)
5287 {
5288 unsigned int i;
5289
5290 vector = rtvec_alloc (n_elts);
5291 for (i = 0; i < n_elts; i++)
5292 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5293 }
5294 }
5295
5296 /* If the constructor has fewer elements than the vector,
5297 clear the whole vector first. Similarly if this is a static
5298 constructor of a non-BLKmode object. */
5299 if (cleared)
5300 need_to_clear = 0;
5301 else if (REG_P (target) && TREE_STATIC (exp))
5302 need_to_clear = 1;
5303 else
5304 {
5305 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5306 tree value;
5307
5308 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5309 {
5310 int n_elts_here = tree_low_cst
5311 (int_const_binop (TRUNC_DIV_EXPR,
5312 TYPE_SIZE (TREE_TYPE (value)),
5313 TYPE_SIZE (elttype), 0), 1);
5314
5315 count += n_elts_here;
5316 if (mostly_zeros_p (value))
5317 zero_count += n_elts_here;
5318 }
5319
5320 /* Clear the entire vector first if there are any missing elements,
5321 or if the incidence of zero elements is >= 75%. */
5322 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5323 }
5324
5325 if (need_to_clear && size > 0 && !vector)
5326 {
5327 if (REG_P (target))
5328 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5329 else
5330 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5331 cleared = 1;
5332 }
5333
5334 /* Inform later passes that the old value is dead. */
5335 if (!cleared && !vector && REG_P (target))
5336 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5337
5338 /* Store each element of the constructor into the corresponding
5339 element of TARGET, determined by counting the elements. */
5340 for (idx = 0, i = 0;
5341 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5342 idx++, i += bitsize / elt_size)
5343 {
5344 HOST_WIDE_INT eltpos;
5345 tree value = ce->value;
5346
5347 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5348 if (cleared && initializer_zerop (value))
5349 continue;
5350
5351 if (ce->index)
5352 eltpos = tree_low_cst (ce->index, 1);
5353 else
5354 eltpos = i;
5355
5356 if (vector)
5357 {
5358 /* Vector CONSTRUCTORs should only be built from smaller
5359 vectors in the case of BLKmode vectors. */
5360 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5361 RTVEC_ELT (vector, eltpos)
5362 = expand_normal (value);
5363 }
5364 else
5365 {
5366 enum machine_mode value_mode =
5367 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5368 ? TYPE_MODE (TREE_TYPE (value))
5369 : eltmode;
5370 bitpos = eltpos * elt_size;
5371 store_constructor_field (target, bitsize, bitpos,
5372 value_mode, value, type,
5373 cleared, get_alias_set (elttype));
5374 }
5375 }
5376
5377 if (vector)
5378 emit_insn (GEN_FCN (icode)
5379 (target,
5380 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5381 break;
5382 }
5383
5384 default:
5385 gcc_unreachable ();
5386 }
5387 }
5388
5389 /* Store the value of EXP (an expression tree)
5390 into a subfield of TARGET which has mode MODE and occupies
5391 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5392 If MODE is VOIDmode, it means that we are storing into a bit-field.
5393
5394 Always return const0_rtx unless we have something particular to
5395 return.
5396
5397 TYPE is the type of the underlying object,
5398
5399 ALIAS_SET is the alias set for the destination. This value will
5400 (in general) be different from that for TARGET, since TARGET is a
5401 reference to the containing structure. */
5402
5403 static rtx
5404 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5405 enum machine_mode mode, tree exp, tree type, int alias_set)
5406 {
5407 HOST_WIDE_INT width_mask = 0;
5408
5409 if (TREE_CODE (exp) == ERROR_MARK)
5410 return const0_rtx;
5411
5412 /* If we have nothing to store, do nothing unless the expression has
5413 side-effects. */
5414 if (bitsize == 0)
5415 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5416 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5417 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5418
5419 /* If we are storing into an unaligned field of an aligned union that is
5420 in a register, we may have the mode of TARGET being an integer mode but
5421 MODE == BLKmode. In that case, get an aligned object whose size and
5422 alignment are the same as TARGET and store TARGET into it (we can avoid
5423 the store if the field being stored is the entire width of TARGET). Then
5424 call ourselves recursively to store the field into a BLKmode version of
5425 that object. Finally, load from the object into TARGET. This is not
5426 very efficient in general, but should only be slightly more expensive
5427 than the otherwise-required unaligned accesses. Perhaps this can be
5428 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5429 twice, once with emit_move_insn and once via store_field. */
5430
5431 if (mode == BLKmode
5432 && (REG_P (target) || GET_CODE (target) == SUBREG))
5433 {
5434 rtx object = assign_temp (type, 0, 1, 1);
5435 rtx blk_object = adjust_address (object, BLKmode, 0);
5436
5437 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5438 emit_move_insn (object, target);
5439
5440 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5441
5442 emit_move_insn (target, object);
5443
5444 /* We want to return the BLKmode version of the data. */
5445 return blk_object;
5446 }
5447
5448 if (GET_CODE (target) == CONCAT)
5449 {
5450 /* We're storing into a struct containing a single __complex. */
5451
5452 gcc_assert (!bitpos);
5453 return store_expr (exp, target, 0);
5454 }
5455
5456 /* If the structure is in a register or if the component
5457 is a bit field, we cannot use addressing to access it.
5458 Use bit-field techniques or SUBREG to store in it. */
5459
5460 if (mode == VOIDmode
5461 || (mode != BLKmode && ! direct_store[(int) mode]
5462 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5463 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5464 || REG_P (target)
5465 || GET_CODE (target) == SUBREG
5466 /* If the field isn't aligned enough to store as an ordinary memref,
5467 store it as a bit field. */
5468 || (mode != BLKmode
5469 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5470 || bitpos % GET_MODE_ALIGNMENT (mode))
5471 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5472 || (bitpos % BITS_PER_UNIT != 0)))
5473 /* If the RHS and field are a constant size and the size of the
5474 RHS isn't the same size as the bitfield, we must use bitfield
5475 operations. */
5476 || (bitsize >= 0
5477 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5478 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5479 {
5480 rtx temp;
5481
5482 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5483 implies a mask operation. If the precision is the same size as
5484 the field we're storing into, that mask is redundant. This is
5485 particularly common with bit field assignments generated by the
5486 C front end. */
5487 if (TREE_CODE (exp) == NOP_EXPR)
5488 {
5489 tree type = TREE_TYPE (exp);
5490 if (INTEGRAL_TYPE_P (type)
5491 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5492 && bitsize == TYPE_PRECISION (type))
5493 {
5494 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5495 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5496 exp = TREE_OPERAND (exp, 0);
5497 }
5498 }
5499
5500 temp = expand_normal (exp);
5501
5502 /* If BITSIZE is narrower than the size of the type of EXP
5503 we will be narrowing TEMP. Normally, what's wanted are the
5504          low-order bits.  However, if EXP's type is a record and this is a
5505          big-endian machine, we want the upper BITSIZE bits.  */
5506 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5507 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5508 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5509 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5510 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5511 - bitsize),
5512 NULL_RTX, 1);
5513
5514 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5515 MODE. */
5516 if (mode != VOIDmode && mode != BLKmode
5517 && mode != TYPE_MODE (TREE_TYPE (exp)))
5518 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5519
5520 /* If the modes of TARGET and TEMP are both BLKmode, both
5521 must be in memory and BITPOS must be aligned on a byte
5522 boundary. If so, we simply do a block copy. */
5523 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5524 {
5525 gcc_assert (MEM_P (target) && MEM_P (temp)
5526 && !(bitpos % BITS_PER_UNIT));
5527
5528 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5529 emit_block_move (target, temp,
5530 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5531 / BITS_PER_UNIT),
5532 BLOCK_OP_NORMAL);
5533
5534 return const0_rtx;
5535 }
5536
5537 /* Store the value in the bitfield. */
5538 store_bit_field (target, bitsize, bitpos, mode, temp);
5539
5540 return const0_rtx;
5541 }
5542 else
5543 {
5544 /* Now build a reference to just the desired component. */
5545 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5546
5547 if (to_rtx == target)
5548 to_rtx = copy_rtx (to_rtx);
5549
5550 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5551 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5552 set_mem_alias_set (to_rtx, alias_set);
5553
5554 return store_expr (exp, to_rtx, 0);
5555 }
5556 }
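
/* For illustration only, a hedged sketch of how store_field is typically
   reached: expand_assignment decomposes the left-hand side of an
   assignment with get_inner_reference and then hands the pieces to this
   routine, roughly

     store_field (to_rtx, bitsize, bitpos, mode1, rhs,
                  TREE_TYPE (lhs), get_alias_set (lhs));

   where TO_RTX, BITSIZE, BITPOS and MODE1 come from the decomposition of
   the reference on the left-hand side; RHS and LHS are hypothetical names
   for the two sides of the assignment.  */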
5557 \f
5558 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5559 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5560 codes and find the ultimate containing object, which we return.
5561
5562 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5563 bit position, and *PUNSIGNEDP to the signedness of the field.
5564 If the position of the field is variable, we store a tree
5565 giving the variable offset (in units) in *POFFSET.
5566 This offset is in addition to the bit position.
5567 If the position is not variable, we store 0 in *POFFSET.
5568
5569 If any of the extraction expressions is volatile,
5570 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5571
5572 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5573 is a mode that can be used to access the field. In that case, *PBITSIZE
5574 is redundant.
5575
5576 If the field describes a variable-sized object, *PMODE is set to
5577 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5578 this case, but the address of the object can be found.
5579
5580 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5581 look through nodes that serve as markers of a greater alignment than
5582 the one that can be deduced from the expression. These nodes make it
5583 possible for front-ends to prevent temporaries from being created by
5584 the middle-end on alignment considerations. For that purpose, the
5585 normal operating mode at high-level is to always pass FALSE so that
5586 the ultimate containing object is really returned; moreover, the
5587 associated predicate handled_component_p will always return TRUE
5588 on these nodes, thus indicating that they are essentially handled
5589 by get_inner_reference. TRUE should only be passed when the caller
5590 is scanning the expression in order to build another representation
5591 and specifically knows how to handle these nodes; as such, this is
5592 the normal operating mode in the RTL expanders. */
5593
5594 tree
5595 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5596 HOST_WIDE_INT *pbitpos, tree *poffset,
5597 enum machine_mode *pmode, int *punsignedp,
5598 int *pvolatilep, bool keep_aligning)
5599 {
5600 tree size_tree = 0;
5601 enum machine_mode mode = VOIDmode;
5602 tree offset = size_zero_node;
5603 tree bit_offset = bitsize_zero_node;
5604 tree tem;
5605
5606 /* First get the mode, signedness, and size. We do this from just the
5607 outermost expression. */
5608 if (TREE_CODE (exp) == COMPONENT_REF)
5609 {
5610 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5611 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5612 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5613
5614 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5615 }
5616 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5617 {
5618 size_tree = TREE_OPERAND (exp, 1);
5619 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5620 }
5621 else
5622 {
5623 mode = TYPE_MODE (TREE_TYPE (exp));
5624 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5625
5626 if (mode == BLKmode)
5627 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5628 else
5629 *pbitsize = GET_MODE_BITSIZE (mode);
5630 }
5631
5632 if (size_tree != 0)
5633 {
5634 if (! host_integerp (size_tree, 1))
5635 mode = BLKmode, *pbitsize = -1;
5636 else
5637 *pbitsize = tree_low_cst (size_tree, 1);
5638 }
5639
5640 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5641 and find the ultimate containing object. */
5642 while (1)
5643 {
5644 switch (TREE_CODE (exp))
5645 {
5646 case BIT_FIELD_REF:
5647 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5648 TREE_OPERAND (exp, 2));
5649 break;
5650
5651 case COMPONENT_REF:
5652 {
5653 tree field = TREE_OPERAND (exp, 1);
5654 tree this_offset = component_ref_field_offset (exp);
5655
5656 /* If this field hasn't been filled in yet, don't go past it.
5657 This should only happen when folding expressions made during
5658 type construction. */
5659 if (this_offset == 0)
5660 break;
5661
5662 offset = size_binop (PLUS_EXPR, offset, this_offset);
5663 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5664 DECL_FIELD_BIT_OFFSET (field));
5665
5666 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5667 }
5668 break;
5669
5670 case ARRAY_REF:
5671 case ARRAY_RANGE_REF:
5672 {
5673 tree index = TREE_OPERAND (exp, 1);
5674 tree low_bound = array_ref_low_bound (exp);
5675 tree unit_size = array_ref_element_size (exp);
5676
5677 /* We assume all arrays have sizes that are a multiple of a byte.
5678 First subtract the lower bound, if any, in the type of the
5679 index, then convert to sizetype and multiply by the size of
5680 the array element. */
5681 if (! integer_zerop (low_bound))
5682 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5683 index, low_bound);
5684
5685 offset = size_binop (PLUS_EXPR, offset,
5686 size_binop (MULT_EXPR,
5687 fold_convert (sizetype, index),
5688 unit_size));
5689 }
5690 break;
5691
5692 case REALPART_EXPR:
5693 break;
5694
5695 case IMAGPART_EXPR:
5696 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5697 bitsize_int (*pbitsize));
5698 break;
5699
5700 case VIEW_CONVERT_EXPR:
5701 if (keep_aligning && STRICT_ALIGNMENT
5702 && (TYPE_ALIGN (TREE_TYPE (exp))
5703 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5704 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5705 < BIGGEST_ALIGNMENT)
5706 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5707 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5708 goto done;
5709 break;
5710
5711 default:
5712 goto done;
5713 }
5714
5715 /* If any reference in the chain is volatile, the effect is volatile. */
5716 if (TREE_THIS_VOLATILE (exp))
5717 *pvolatilep = 1;
5718
5719 exp = TREE_OPERAND (exp, 0);
5720 }
5721 done:
5722
5723 /* If OFFSET is constant, see if we can return the whole thing as a
5724 constant bit position. Otherwise, split it up. */
5725 if (host_integerp (offset, 0)
5726 && 0 != (tem = size_binop (MULT_EXPR,
5727 fold_convert (bitsizetype, offset),
5728 bitsize_unit_node))
5729 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5730 && host_integerp (tem, 0))
5731 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5732 else
5733 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5734
5735 *pmode = mode;
5736 return exp;
5737 }
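
/* For illustration, a sketch of the typical use of get_inner_reference,
   mirroring the calls made elsewhere in this file: decompose a reference
   REF into its base object, a constant bit position and an optional
   variable byte offset.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep,
                                      false);

   For a COMPONENT_REF "s.f" whose field sits at a constant byte offset
   of 4, BASE is the decl for "s", BITPOS is 4 * BITS_PER_UNIT, OFFSET
   is zero and BITSIZE is the size of the field in bits.  */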
5738
5739 /* Return a tree of sizetype representing the size, in bytes, of the element
5740 of EXP, an ARRAY_REF. */
5741
5742 tree
5743 array_ref_element_size (tree exp)
5744 {
5745 tree aligned_size = TREE_OPERAND (exp, 3);
5746 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5747
5748 /* If a size was specified in the ARRAY_REF, it's the size measured
5749 in alignment units of the element type. So multiply by that value. */
5750 if (aligned_size)
5751 {
5752 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5753 sizetype from another type of the same width and signedness. */
5754 if (TREE_TYPE (aligned_size) != sizetype)
5755 aligned_size = fold_convert (sizetype, aligned_size);
5756 return size_binop (MULT_EXPR, aligned_size,
5757 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5758 }
5759
5760 /* Otherwise, take the size from that of the element type. Substitute
5761 any PLACEHOLDER_EXPR that we have. */
5762 else
5763 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5764 }
5765
5766 /* Return a tree representing the lower bound of the array mentioned in
5767 EXP, an ARRAY_REF. */
5768
5769 tree
5770 array_ref_low_bound (tree exp)
5771 {
5772 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5773
5774 /* If a lower bound is specified in EXP, use it. */
5775 if (TREE_OPERAND (exp, 2))
5776 return TREE_OPERAND (exp, 2);
5777
5778 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5779 substituting for a PLACEHOLDER_EXPR as needed. */
5780 if (domain_type && TYPE_MIN_VALUE (domain_type))
5781 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5782
5783 /* Otherwise, return a zero of the appropriate type. */
5784 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5785 }
5786
5787 /* Return a tree representing the upper bound of the array mentioned in
5788 EXP, an ARRAY_REF. */
5789
5790 tree
5791 array_ref_up_bound (tree exp)
5792 {
5793 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5794
5795 /* If there is a domain type and it has an upper bound, use it, substituting
5796 for a PLACEHOLDER_EXPR as needed. */
5797 if (domain_type && TYPE_MAX_VALUE (domain_type))
5798 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5799
5800 /* Otherwise fail. */
5801 return NULL_TREE;
5802 }
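
/* For illustration, a sketch of how the accessors above combine: given
   an ARRAY_REF node REF for "a[i]", the byte offset of the referenced
   element is computed as

     tree index = TREE_OPERAND (ref, 1);
     tree byte_off
       = size_binop (MULT_EXPR,
                     fold_convert (sizetype,
                                   fold_build2 (MINUS_EXPR,
                                                TREE_TYPE (index), index,
                                                array_ref_low_bound (ref))),
                     array_ref_element_size (ref));

   which is essentially the computation that get_inner_reference performs
   for ARRAY_REF and ARRAY_RANGE_REF above.  */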
5803
5804 /* Return a tree representing the offset, in bytes, of the field referenced
5805 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5806
5807 tree
5808 component_ref_field_offset (tree exp)
5809 {
5810 tree aligned_offset = TREE_OPERAND (exp, 2);
5811 tree field = TREE_OPERAND (exp, 1);
5812
5813 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5814 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5815 value. */
5816 if (aligned_offset)
5817 {
5818 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5819 sizetype from another type of the same width and signedness. */
5820 if (TREE_TYPE (aligned_offset) != sizetype)
5821 aligned_offset = fold_convert (sizetype, aligned_offset);
5822 return size_binop (MULT_EXPR, aligned_offset,
5823 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5824 }
5825
5826 /* Otherwise, take the offset from that of the field. Substitute
5827 any PLACEHOLDER_EXPR that we have. */
5828 else
5829 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5830 }
5831
5832 /* Return 1 if T is an expression that get_inner_reference handles. */
5833
5834 int
5835 handled_component_p (tree t)
5836 {
5837 switch (TREE_CODE (t))
5838 {
5839 case BIT_FIELD_REF:
5840 case COMPONENT_REF:
5841 case ARRAY_REF:
5842 case ARRAY_RANGE_REF:
5843 case VIEW_CONVERT_EXPR:
5844 case REALPART_EXPR:
5845 case IMAGPART_EXPR:
5846 return 1;
5847
5848 default:
5849 return 0;
5850 }
5851 }
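
/* For illustration, the usual idiom built on handled_component_p: peel
   off all reference operations to reach the base object of an arbitrary
   reference tree T.

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);

   This is the walk that get_inner_reference performs, except that it
   also accumulates the bit and byte offsets along the way.  */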
5852 \f
5853 /* Given an rtx VALUE that may contain additions and multiplications, return
5854 an equivalent value that just refers to a register, memory, or constant.
5855 This is done by generating instructions to perform the arithmetic and
5856 returning a pseudo-register containing the value.
5857
5858 The returned value may be a REG, SUBREG, MEM or constant. */
5859
5860 rtx
5861 force_operand (rtx value, rtx target)
5862 {
5863 rtx op1, op2;
5864 /* Use subtarget as the target for operand 0 of a binary operation. */
5865 rtx subtarget = get_subtarget (target);
5866 enum rtx_code code = GET_CODE (value);
5867
5868   /* Check for subreg applied to an expression produced by the loop optimizer.  */
5869 if (code == SUBREG
5870 && !REG_P (SUBREG_REG (value))
5871 && !MEM_P (SUBREG_REG (value)))
5872 {
5873 value = simplify_gen_subreg (GET_MODE (value),
5874 force_reg (GET_MODE (SUBREG_REG (value)),
5875 force_operand (SUBREG_REG (value),
5876 NULL_RTX)),
5877 GET_MODE (SUBREG_REG (value)),
5878 SUBREG_BYTE (value));
5879 code = GET_CODE (value);
5880 }
5881
5882 /* Check for a PIC address load. */
5883 if ((code == PLUS || code == MINUS)
5884 && XEXP (value, 0) == pic_offset_table_rtx
5885 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5886 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5887 || GET_CODE (XEXP (value, 1)) == CONST))
5888 {
5889 if (!subtarget)
5890 subtarget = gen_reg_rtx (GET_MODE (value));
5891 emit_move_insn (subtarget, value);
5892 return subtarget;
5893 }
5894
5895 if (ARITHMETIC_P (value))
5896 {
5897 op2 = XEXP (value, 1);
5898 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5899 subtarget = 0;
5900 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5901 {
5902 code = PLUS;
5903 op2 = negate_rtx (GET_MODE (value), op2);
5904 }
5905
5906 /* Check for an addition with OP2 a constant integer and our first
5907 operand a PLUS of a virtual register and something else. In that
5908 case, we want to emit the sum of the virtual register and the
5909 constant first and then add the other value. This allows virtual
5910 register instantiation to simply modify the constant rather than
5911 creating another one around this addition. */
5912 if (code == PLUS && GET_CODE (op2) == CONST_INT
5913 && GET_CODE (XEXP (value, 0)) == PLUS
5914 && REG_P (XEXP (XEXP (value, 0), 0))
5915 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5916 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5917 {
5918 rtx temp = expand_simple_binop (GET_MODE (value), code,
5919 XEXP (XEXP (value, 0), 0), op2,
5920 subtarget, 0, OPTAB_LIB_WIDEN);
5921 return expand_simple_binop (GET_MODE (value), code, temp,
5922 force_operand (XEXP (XEXP (value,
5923 0), 1), 0),
5924 target, 0, OPTAB_LIB_WIDEN);
5925 }
5926
5927 op1 = force_operand (XEXP (value, 0), subtarget);
5928 op2 = force_operand (op2, NULL_RTX);
5929 switch (code)
5930 {
5931 case MULT:
5932 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5933 case DIV:
5934 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5935 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5936 target, 1, OPTAB_LIB_WIDEN);
5937 else
5938 return expand_divmod (0,
5939 FLOAT_MODE_P (GET_MODE (value))
5940 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5941 GET_MODE (value), op1, op2, target, 0);
5942 break;
5943 case MOD:
5944 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5945 target, 0);
5946 break;
5947 case UDIV:
5948 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5949 target, 1);
5950 break;
5951 case UMOD:
5952 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5953 target, 1);
5954 break;
5955 case ASHIFTRT:
5956 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5957 target, 0, OPTAB_LIB_WIDEN);
5958 break;
5959 default:
5960 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5961 target, 1, OPTAB_LIB_WIDEN);
5962 }
5963 }
5964 if (UNARY_P (value))
5965 {
5966 if (!target)
5967 target = gen_reg_rtx (GET_MODE (value));
5968 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5969 switch (code)
5970 {
5971 case ZERO_EXTEND:
5972 case SIGN_EXTEND:
5973 case TRUNCATE:
5974 convert_move (target, op1, code == ZERO_EXTEND);
5975 return target;
5976
5977 case FIX:
5978 case UNSIGNED_FIX:
5979 expand_fix (target, op1, code == UNSIGNED_FIX);
5980 return target;
5981
5982 case FLOAT:
5983 case UNSIGNED_FLOAT:
5984 expand_float (target, op1, code == UNSIGNED_FLOAT);
5985 return target;
5986
5987 default:
5988 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5989 }
5990 }
5991
5992 #ifdef INSN_SCHEDULING
5993   /* On machines that have insn scheduling, we want all memory references to be
5994 explicit, so we need to deal with such paradoxical SUBREGs. */
5995 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5996 && (GET_MODE_SIZE (GET_MODE (value))
5997 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5998 value
5999 = simplify_gen_subreg (GET_MODE (value),
6000 force_reg (GET_MODE (SUBREG_REG (value)),
6001 force_operand (SUBREG_REG (value),
6002 NULL_RTX)),
6003 GET_MODE (SUBREG_REG (value)),
6004 SUBREG_BYTE (value));
6005 #endif
6006
6007 return value;
6008 }
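
/* For illustration, a hedged sketch of a typical force_operand call:
   turn an address computed as arithmetic rtl into something that can be
   used directly as an operand.  BASE_REG and DISP are hypothetical names.

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (disp));
     addr = force_operand (addr, NULL_RTX);

   If the PLUS cannot be used as-is, the necessary add insns are emitted
   and a pseudo register holding the sum is returned.  */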
6009 \f
6010 /* Subroutine of expand_expr: return nonzero iff there is no way that
6011 EXP can reference X, which is being modified. TOP_P is nonzero if this
6012 call is going to be used to determine whether we need a temporary
6013 for EXP, as opposed to a recursive call to this function.
6014
6015 It is always safe for this routine to return zero since it merely
6016 searches for optimization opportunities. */
6017
6018 int
6019 safe_from_p (rtx x, tree exp, int top_p)
6020 {
6021 rtx exp_rtl = 0;
6022 int i, nops;
6023
6024 if (x == 0
6025 /* If EXP has varying size, we MUST use a target since we currently
6026 have no way of allocating temporaries of variable size
6027 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6028 So we assume here that something at a higher level has prevented a
6029 clash. This is somewhat bogus, but the best we can do. Only
6030 do this when X is BLKmode and when we are at the top level. */
6031 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6032 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6033 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6034 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6035 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6036 != INTEGER_CST)
6037 && GET_MODE (x) == BLKmode)
6038 /* If X is in the outgoing argument area, it is always safe. */
6039 || (MEM_P (x)
6040 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6041 || (GET_CODE (XEXP (x, 0)) == PLUS
6042 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6043 return 1;
6044
6045 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6046 find the underlying pseudo. */
6047 if (GET_CODE (x) == SUBREG)
6048 {
6049 x = SUBREG_REG (x);
6050 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6051 return 0;
6052 }
6053
6054 /* Now look at our tree code and possibly recurse. */
6055 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6056 {
6057 case tcc_declaration:
6058 exp_rtl = DECL_RTL_IF_SET (exp);
6059 break;
6060
6061 case tcc_constant:
6062 return 1;
6063
6064 case tcc_exceptional:
6065 if (TREE_CODE (exp) == TREE_LIST)
6066 {
6067 while (1)
6068 {
6069 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6070 return 0;
6071 exp = TREE_CHAIN (exp);
6072 if (!exp)
6073 return 1;
6074 if (TREE_CODE (exp) != TREE_LIST)
6075 return safe_from_p (x, exp, 0);
6076 }
6077 }
6078 else if (TREE_CODE (exp) == ERROR_MARK)
6079 return 1; /* An already-visited SAVE_EXPR? */
6080 else
6081 return 0;
6082
6083 case tcc_statement:
6084 /* The only case we look at here is the DECL_INITIAL inside a
6085 DECL_EXPR. */
6086 return (TREE_CODE (exp) != DECL_EXPR
6087 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6088 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6089 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6090
6091 case tcc_binary:
6092 case tcc_comparison:
6093 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6094 return 0;
6095 /* Fall through. */
6096
6097 case tcc_unary:
6098 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6099
6100 case tcc_expression:
6101 case tcc_reference:
6102 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6103 the expression. If it is set, we conflict iff we are that rtx or
6104 both are in memory. Otherwise, we check all operands of the
6105 expression recursively. */
6106
6107 switch (TREE_CODE (exp))
6108 {
6109 case ADDR_EXPR:
6110 /* If the operand is static or we are static, we can't conflict.
6111 Likewise if we don't conflict with the operand at all. */
6112 if (staticp (TREE_OPERAND (exp, 0))
6113 || TREE_STATIC (exp)
6114 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6115 return 1;
6116
6117 /* Otherwise, the only way this can conflict is if we are taking
6118        the address of a DECL whose address is part of X, which is
6119 very rare. */
6120 exp = TREE_OPERAND (exp, 0);
6121 if (DECL_P (exp))
6122 {
6123 if (!DECL_RTL_SET_P (exp)
6124 || !MEM_P (DECL_RTL (exp)))
6125 return 0;
6126 else
6127 exp_rtl = XEXP (DECL_RTL (exp), 0);
6128 }
6129 break;
6130
6131 case MISALIGNED_INDIRECT_REF:
6132 case ALIGN_INDIRECT_REF:
6133 case INDIRECT_REF:
6134 if (MEM_P (x)
6135 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6136 get_alias_set (exp)))
6137 return 0;
6138 break;
6139
6140 case CALL_EXPR:
6141 /* Assume that the call will clobber all hard registers and
6142 all of memory. */
6143 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6144 || MEM_P (x))
6145 return 0;
6146 break;
6147
6148 case WITH_CLEANUP_EXPR:
6149 case CLEANUP_POINT_EXPR:
6150 /* Lowered by gimplify.c. */
6151 gcc_unreachable ();
6152
6153 case SAVE_EXPR:
6154 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6155
6156 default:
6157 break;
6158 }
6159
6160 /* If we have an rtx, we do not need to scan our operands. */
6161 if (exp_rtl)
6162 break;
6163
6164 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6165 for (i = 0; i < nops; i++)
6166 if (TREE_OPERAND (exp, i) != 0
6167 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6168 return 0;
6169
6170 /* If this is a language-specific tree code, it may require
6171 special handling. */
6172 if ((unsigned int) TREE_CODE (exp)
6173 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6174 && !lang_hooks.safe_from_p (x, exp))
6175 return 0;
6176 break;
6177
6178 case tcc_type:
6179 /* Should never get a type here. */
6180 gcc_unreachable ();
6181 }
6182
6183 /* If we have an rtl, find any enclosed object. Then see if we conflict
6184 with it. */
6185 if (exp_rtl)
6186 {
6187 if (GET_CODE (exp_rtl) == SUBREG)
6188 {
6189 exp_rtl = SUBREG_REG (exp_rtl);
6190 if (REG_P (exp_rtl)
6191 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6192 return 0;
6193 }
6194
6195 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6196 are memory and they conflict. */
6197 return ! (rtx_equal_p (x, exp_rtl)
6198 || (MEM_P (x) && MEM_P (exp_rtl)
6199 && true_dependence (exp_rtl, VOIDmode, x,
6200 rtx_addr_varies_p)));
6201 }
6202
6203 /* If we reach here, it is safe. */
6204 return 1;
6205 }
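
/* For illustration, the way safe_from_p is used just below in
   expand_operands: before reusing TARGET for the first operand, check
   that evaluating the second operand cannot clobber it.

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   A zero return only means "could not prove it safe", so dropping
   TARGET is always a correct, if conservative, response.  */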
6206
6207 \f
6208 /* Return the highest power of two that EXP is known to be a multiple of.
6209 This is used in updating alignment of MEMs in array references. */
6210
6211 unsigned HOST_WIDE_INT
6212 highest_pow2_factor (tree exp)
6213 {
6214 unsigned HOST_WIDE_INT c0, c1;
6215
6216 switch (TREE_CODE (exp))
6217 {
6218 case INTEGER_CST:
6219 /* We can find the lowest bit that's a one. If the low
6220 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6221 We need to handle this case since we can find it in a COND_EXPR,
6222 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6223 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6224 later ICE. */
6225 if (TREE_CONSTANT_OVERFLOW (exp))
6226 return BIGGEST_ALIGNMENT;
6227 else
6228 {
6229 /* Note: tree_low_cst is intentionally not used here,
6230 we don't care about the upper bits. */
6231 c0 = TREE_INT_CST_LOW (exp);
6232 c0 &= -c0;
6233 return c0 ? c0 : BIGGEST_ALIGNMENT;
6234 }
6235 break;
6236
6237 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6238 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6239 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6240 return MIN (c0, c1);
6241
6242 case MULT_EXPR:
6243 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6244 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6245 return c0 * c1;
6246
6247 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6248 case CEIL_DIV_EXPR:
6249 if (integer_pow2p (TREE_OPERAND (exp, 1))
6250 && host_integerp (TREE_OPERAND (exp, 1), 1))
6251 {
6252 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6253 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6254 return MAX (1, c0 / c1);
6255 }
6256 break;
6257
6258 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6259 case SAVE_EXPR:
6260 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6261
6262 case COMPOUND_EXPR:
6263 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6264
6265 case COND_EXPR:
6266 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6267 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6268 return MIN (c0, c1);
6269
6270 default:
6271 break;
6272 }
6273
6274 return 1;
6275 }
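
/* For illustration, a worked example of the rules above: for the
   expression 4 * i + 16, the MULT_EXPR contributes 4 * 1 = 4, the
   INTEGER_CST 16 contributes 16, and the PLUS_EXPR takes the minimum of
   the two, so highest_pow2_factor returns 4; the value is known to be a
   multiple of 4 no matter what i is.  */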
6276
6277 /* Similar, except that the alignment requirements of TARGET are
6278 taken into account. Assume it is at least as aligned as its
6279 type, unless it is a COMPONENT_REF in which case the layout of
6280 the structure gives the alignment. */
6281
6282 static unsigned HOST_WIDE_INT
6283 highest_pow2_factor_for_target (tree target, tree exp)
6284 {
6285 unsigned HOST_WIDE_INT target_align, factor;
6286
6287 factor = highest_pow2_factor (exp);
6288 if (TREE_CODE (target) == COMPONENT_REF)
6289 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6290 else
6291 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6292 return MAX (factor, target_align);
6293 }
6294 \f
6295 /* Expands variable VAR. */
6296
6297 void
6298 expand_var (tree var)
6299 {
6300 if (DECL_EXTERNAL (var))
6301 return;
6302
6303 if (TREE_STATIC (var))
6304 /* If this is an inlined copy of a static local variable,
6305 look up the original decl. */
6306 var = DECL_ORIGIN (var);
6307
6308 if (TREE_STATIC (var)
6309 ? !TREE_ASM_WRITTEN (var)
6310 : !DECL_RTL_SET_P (var))
6311 {
6312 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6313 /* Should be ignored. */;
6314 else if (lang_hooks.expand_decl (var))
6315 /* OK. */;
6316 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6317 expand_decl (var);
6318 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6319 rest_of_decl_compilation (var, 0, 0);
6320 else
6321 /* No expansion needed. */
6322 gcc_assert (TREE_CODE (var) == TYPE_DECL
6323 || TREE_CODE (var) == CONST_DECL
6324 || TREE_CODE (var) == FUNCTION_DECL
6325 || TREE_CODE (var) == LABEL_DECL);
6326 }
6327 }
6328
6329 /* Subroutine of expand_expr. Expand the two operands of a binary
6330 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6331 The value may be stored in TARGET if TARGET is nonzero. The
6332 MODIFIER argument is as documented by expand_expr. */
6333
6334 static void
6335 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6336 enum expand_modifier modifier)
6337 {
6338 if (! safe_from_p (target, exp1, 1))
6339 target = 0;
6340 if (operand_equal_p (exp0, exp1, 0))
6341 {
6342 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6343 *op1 = copy_rtx (*op0);
6344 }
6345 else
6346 {
6347 /* If we need to preserve evaluation order, copy exp0 into its own
6348 temporary variable so that it can't be clobbered by exp1. */
6349 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6350 exp0 = save_expr (exp0);
6351 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6352 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6353 }
6354 }
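
/* For illustration, a hedged sketch of how a binary operation is
   expanded with the helper above (OP0 and OP1 are hypothetical locals):

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);
     temp = expand_simple_binop (mode, PLUS, op0, op1, target,
                                 unsignedp, OPTAB_LIB_WIDEN);

   which matches the pattern used by the arithmetic cases of
   expand_expr_real_1 further down in this file.  */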
6355
6356 \f
6357 /* Return a MEM that contains constant EXP. DEFER is as for
6358 output_constant_def and MODIFIER is as for expand_expr. */
6359
6360 static rtx
6361 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6362 {
6363 rtx mem;
6364
6365 mem = output_constant_def (exp, defer);
6366 if (modifier != EXPAND_INITIALIZER)
6367 mem = use_anchored_address (mem);
6368 return mem;
6369 }
6370
6371 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6372 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6373
6374 static rtx
6375 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6376 enum expand_modifier modifier)
6377 {
6378 rtx result, subtarget;
6379 tree inner, offset;
6380 HOST_WIDE_INT bitsize, bitpos;
6381 int volatilep, unsignedp;
6382 enum machine_mode mode1;
6383
6384 /* If we are taking the address of a constant and are at the top level,
6385 we have to use output_constant_def since we can't call force_const_mem
6386 at top level. */
6387 /* ??? This should be considered a front-end bug. We should not be
6388 generating ADDR_EXPR of something that isn't an LVALUE. The only
6389 exception here is STRING_CST. */
6390 if (TREE_CODE (exp) == CONSTRUCTOR
6391 || CONSTANT_CLASS_P (exp))
6392 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6393
6394 /* Everything must be something allowed by is_gimple_addressable. */
6395 switch (TREE_CODE (exp))
6396 {
6397 case INDIRECT_REF:
6398 /* This case will happen via recursion for &a->b. */
6399 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6400
6401 case CONST_DECL:
6402 /* Recurse and make the output_constant_def clause above handle this. */
6403 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6404 tmode, modifier);
6405
6406 case REALPART_EXPR:
6407 /* The real part of the complex number is always first, therefore
6408 the address is the same as the address of the parent object. */
6409 offset = 0;
6410 bitpos = 0;
6411 inner = TREE_OPERAND (exp, 0);
6412 break;
6413
6414 case IMAGPART_EXPR:
6415 /* The imaginary part of the complex number is always second.
6416 The expression is therefore always offset by the size of the
6417 scalar type. */
6418 offset = 0;
6419 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6420 inner = TREE_OPERAND (exp, 0);
6421 break;
6422
6423 default:
6424 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6425 expand_expr, as that can have various side effects; LABEL_DECLs for
6426 example, may not have their DECL_RTL set yet. Assume language
6427 specific tree nodes can be expanded in some interesting way. */
6428 if (DECL_P (exp)
6429 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6430 {
6431 result = expand_expr (exp, target, tmode,
6432 modifier == EXPAND_INITIALIZER
6433 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6434
6435 /* If the DECL isn't in memory, then the DECL wasn't properly
6436 marked TREE_ADDRESSABLE, which will be either a front-end
6437 or a tree optimizer bug. */
6438 gcc_assert (MEM_P (result));
6439 result = XEXP (result, 0);
6440
6441 /* ??? Is this needed anymore? */
6442 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6443 {
6444 assemble_external (exp);
6445 TREE_USED (exp) = 1;
6446 }
6447
6448 if (modifier != EXPAND_INITIALIZER
6449 && modifier != EXPAND_CONST_ADDRESS)
6450 result = force_operand (result, target);
6451 return result;
6452 }
6453
6454 /* Pass FALSE as the last argument to get_inner_reference although
6455 we are expanding to RTL. The rationale is that we know how to
6456 handle "aligning nodes" here: we can just bypass them because
6457 they won't change the final object whose address will be returned
6458 (they actually exist only for that purpose). */
6459 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6460 &mode1, &unsignedp, &volatilep, false);
6461 break;
6462 }
6463
6464 /* We must have made progress. */
6465 gcc_assert (inner != exp);
6466
6467 subtarget = offset || bitpos ? NULL_RTX : target;
6468 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6469
6470 if (offset)
6471 {
6472 rtx tmp;
6473
6474 if (modifier != EXPAND_NORMAL)
6475 result = force_operand (result, NULL);
6476 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6477
6478 result = convert_memory_address (tmode, result);
6479 tmp = convert_memory_address (tmode, tmp);
6480
6481 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6482 result = gen_rtx_PLUS (tmode, result, tmp);
6483 else
6484 {
6485 subtarget = bitpos ? NULL_RTX : target;
6486 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6487 1, OPTAB_LIB_WIDEN);
6488 }
6489 }
6490
6491 if (bitpos)
6492 {
6493 /* Someone beforehand should have rejected taking the address
6494 of such an object. */
6495 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6496
6497 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6498 if (modifier < EXPAND_SUM)
6499 result = force_operand (result, target);
6500 }
6501
6502 return result;
6503 }
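
/* For illustration, a worked case of the routine above: for the address
   of "s.f" where the field sits at a constant byte offset of 4,
   get_inner_reference returns "s" with BITPOS == 4 * BITS_PER_UNIT and
   a null OFFSET, the recursive call produces the address of "s", and
   the final result is plus_constant (result, 4), forced into a valid
   operand unless an EXPAND_SUM-style modifier asked for a bare PLUS.  */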
6504
6505 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6506 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6507
6508 static rtx
6509 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6510 enum expand_modifier modifier)
6511 {
6512 enum machine_mode rmode;
6513 rtx result;
6514
6515 /* Target mode of VOIDmode says "whatever's natural". */
6516 if (tmode == VOIDmode)
6517 tmode = TYPE_MODE (TREE_TYPE (exp));
6518
6519 /* We can get called with some Weird Things if the user does silliness
6520 like "(short) &a". In that case, convert_memory_address won't do
6521 the right thing, so ignore the given target mode. */
6522 if (tmode != Pmode && tmode != ptr_mode)
6523 tmode = Pmode;
6524
6525 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6526 tmode, modifier);
6527
6528   /* Despite expand_expr's claims about ignoring TMODE when not
6529      strictly convenient, things break if we don't honor it.  Note
6530 that combined with the above, we only do this for pointer modes. */
6531 rmode = GET_MODE (result);
6532 if (rmode == VOIDmode)
6533 rmode = tmode;
6534 if (rmode != tmode)
6535 result = convert_memory_address (tmode, result);
6536
6537 return result;
6538 }
6539
6540
6541 /* expand_expr: generate code for computing expression EXP.
6542 An rtx for the computed value is returned. The value is never null.
6543 In the case of a void EXP, const0_rtx is returned.
6544
6545 The value may be stored in TARGET if TARGET is nonzero.
6546 TARGET is just a suggestion; callers must assume that
6547 the rtx returned may not be the same as TARGET.
6548
6549 If TARGET is CONST0_RTX, it means that the value will be ignored.
6550
6551 If TMODE is not VOIDmode, it suggests generating the
6552 result in mode TMODE. But this is done only when convenient.
6553    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6554 TMODE is just a suggestion; callers must assume that
6555 the rtx returned may not have mode TMODE.
6556
6557 Note that TARGET may have neither TMODE nor MODE. In that case, it
6558 probably will not be used.
6559
6560 If MODIFIER is EXPAND_SUM then when EXP is an addition
6561 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6562 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6563 products as above, or REG or MEM, or constant.
6564 Ordinarily in such cases we would output mul or add instructions
6565 and then return a pseudo reg containing the sum.
6566
6567 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6568 it also marks a label as absolutely required (it can't be dead).
6569 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6570 This is used for outputting expressions used in initializers.
6571
6572 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6573 with a constant address even if that address is not normally legitimate.
6574 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6575
6576 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6577 a call parameter. Such targets require special care as we haven't yet
6578 marked TARGET so that it's safe from being trashed by libcalls. We
6579 don't want to use TARGET for anything but the final result;
6580 Intermediate values must go elsewhere. Additionally, calls to
6581    intermediate values must go elsewhere.  Additionally, calls to
6582
6583 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6584 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6585 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6586 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6587 recursively. */
6588
6589 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6590 enum expand_modifier, rtx *);
6591
6592 rtx
6593 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6594 enum expand_modifier modifier, rtx *alt_rtl)
6595 {
6596 int rn = -1;
6597 rtx ret, last = NULL;
6598
6599 /* Handle ERROR_MARK before anybody tries to access its type. */
6600 if (TREE_CODE (exp) == ERROR_MARK
6601 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6602 {
6603 ret = CONST0_RTX (tmode);
6604 return ret ? ret : const0_rtx;
6605 }
6606
6607 if (flag_non_call_exceptions)
6608 {
6609 rn = lookup_stmt_eh_region (exp);
6610 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6611 if (rn >= 0)
6612 last = get_last_insn ();
6613 }
6614
6615 /* If this is an expression of some kind and it has an associated line
6616 number, then emit the line number before expanding the expression.
6617
6618 We need to save and restore the file and line information so that
6619 errors discovered during expansion are emitted with the right
6620      information.  It would be better if the diagnostic routines
6621 used the file/line information embedded in the tree nodes rather
6622 than globals. */
6623 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6624 {
6625 location_t saved_location = input_location;
6626 input_location = EXPR_LOCATION (exp);
6627 emit_line_note (input_location);
6628
6629 /* Record where the insns produced belong. */
6630 record_block_change (TREE_BLOCK (exp));
6631
6632 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6633
6634 input_location = saved_location;
6635 }
6636 else
6637 {
6638 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6639 }
6640
6641 /* If using non-call exceptions, mark all insns that may trap.
6642 expand_call() will mark CALL_INSNs before we get to this code,
6643 but it doesn't handle libcalls, and these may trap. */
6644 if (rn >= 0)
6645 {
6646 rtx insn;
6647 for (insn = next_real_insn (last); insn;
6648 insn = next_real_insn (insn))
6649 {
6650 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6651 /* If we want exceptions for non-call insns, any
6652 may_trap_p instruction may throw. */
6653 && GET_CODE (PATTERN (insn)) != CLOBBER
6654 && GET_CODE (PATTERN (insn)) != USE
6655 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6656 {
6657 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6658 REG_NOTES (insn));
6659 }
6660 }
6661 }
6662
6663 return ret;
6664 }
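
/* For illustration, the two entry points used throughout this file are
   thin wrappers around expand_expr_real provided by expr.h:

     rtx x = expand_expr (exp, target, mode, EXPAND_NORMAL);
     rtx y = expand_normal (exp);

   the second form being shorthand for expansion with no suggested
   target, VOIDmode and EXPAND_NORMAL.  */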
6665
6666 static rtx
6667 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6668 enum expand_modifier modifier, rtx *alt_rtl)
6669 {
6670 rtx op0, op1, temp, decl_rtl;
6671 tree type = TREE_TYPE (exp);
6672 int unsignedp;
6673 enum machine_mode mode;
6674 enum tree_code code = TREE_CODE (exp);
6675 optab this_optab;
6676 rtx subtarget, original_target;
6677 int ignore;
6678 tree context, subexp0, subexp1;
6679 bool reduce_bit_field = false;
6680 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6681 ? reduce_to_bit_field_precision ((expr), \
6682 target, \
6683 type) \
6684 : (expr))
6685
6686 mode = TYPE_MODE (type);
6687 unsignedp = TYPE_UNSIGNED (type);
6688 if (lang_hooks.reduce_bit_field_operations
6689 && TREE_CODE (type) == INTEGER_TYPE
6690 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6691 {
6692 /* An operation in what may be a bit-field type needs the
6693 result to be reduced to the precision of the bit-field type,
6694 which is narrower than that of the type's mode. */
6695 reduce_bit_field = true;
6696 if (modifier == EXPAND_STACK_PARM)
6697 target = 0;
6698 }
6699
6700 /* Use subtarget as the target for operand 0 of a binary operation. */
6701 subtarget = get_subtarget (target);
6702 original_target = target;
6703 ignore = (target == const0_rtx
6704 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6705 || code == CONVERT_EXPR || code == COND_EXPR
6706 || code == VIEW_CONVERT_EXPR)
6707 && TREE_CODE (type) == VOID_TYPE));
6708
6709 /* If we are going to ignore this result, we need only do something
6710 if there is a side-effect somewhere in the expression. If there
6711 is, short-circuit the most common cases here. Note that we must
6712 not call expand_expr with anything but const0_rtx in case this
6713 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6714
6715 if (ignore)
6716 {
6717 if (! TREE_SIDE_EFFECTS (exp))
6718 return const0_rtx;
6719
6720 /* Ensure we reference a volatile object even if value is ignored, but
6721 don't do this if all we are doing is taking its address. */
6722 if (TREE_THIS_VOLATILE (exp)
6723 && TREE_CODE (exp) != FUNCTION_DECL
6724 && mode != VOIDmode && mode != BLKmode
6725 && modifier != EXPAND_CONST_ADDRESS)
6726 {
6727 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6728 if (MEM_P (temp))
6729 temp = copy_to_reg (temp);
6730 return const0_rtx;
6731 }
6732
6733 if (TREE_CODE_CLASS (code) == tcc_unary
6734 || code == COMPONENT_REF || code == INDIRECT_REF)
6735 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6736 modifier);
6737
6738 else if (TREE_CODE_CLASS (code) == tcc_binary
6739 || TREE_CODE_CLASS (code) == tcc_comparison
6740 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6741 {
6742 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6743 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6744 return const0_rtx;
6745 }
6746 else if (code == BIT_FIELD_REF)
6747 {
6748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6749 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6750 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6751 return const0_rtx;
6752 }
6753
6754 target = 0;
6755 }
6756
6757
6758 switch (code)
6759 {
6760 case LABEL_DECL:
6761 {
6762 tree function = decl_function_context (exp);
6763
6764 temp = label_rtx (exp);
6765 temp = gen_rtx_LABEL_REF (Pmode, temp);
6766
6767 if (function != current_function_decl
6768 && function != 0)
6769 LABEL_REF_NONLOCAL_P (temp) = 1;
6770
6771 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6772 return temp;
6773 }
6774
6775 case SSA_NAME:
6776 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6777 NULL);
6778
6779 case PARM_DECL:
6780 case VAR_DECL:
6781 /* If a static var's type was incomplete when the decl was written,
6782 but the type is complete now, lay out the decl now. */
6783 if (DECL_SIZE (exp) == 0
6784 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6785 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6786 layout_decl (exp, 0);
6787
6788 /* ... fall through ... */
6789
6790 case FUNCTION_DECL:
6791 case RESULT_DECL:
6792 decl_rtl = DECL_RTL (exp);
6793 gcc_assert (decl_rtl);
6794
6795 /* Ensure variable marked as used even if it doesn't go through
6796      a parser.  If it hasn't been used yet, write out an external
6797 definition. */
6798 if (! TREE_USED (exp))
6799 {
6800 assemble_external (exp);
6801 TREE_USED (exp) = 1;
6802 }
6803
6804 /* Show we haven't gotten RTL for this yet. */
6805 temp = 0;
6806
6807 /* Variables inherited from containing functions should have
6808 been lowered by this point. */
6809 context = decl_function_context (exp);
6810 gcc_assert (!context
6811 || context == current_function_decl
6812 || TREE_STATIC (exp)
6813 /* ??? C++ creates functions that are not TREE_STATIC. */
6814 || TREE_CODE (exp) == FUNCTION_DECL);
6815
6816 /* This is the case of an array whose size is to be determined
6817 from its initializer, while the initializer is still being parsed.
6818 See expand_decl. */
6819
6820 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6821 temp = validize_mem (decl_rtl);
6822
6823       /* If DECL_RTL is memory, we are in the normal case: if either
6824          the address is not valid, or it is not a register and -fforce-addr
6825          is specified, get the address into a register.  */
6826
6827 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6828 {
6829 if (alt_rtl)
6830 *alt_rtl = decl_rtl;
6831 decl_rtl = use_anchored_address (decl_rtl);
6832 if (modifier != EXPAND_CONST_ADDRESS
6833 && modifier != EXPAND_SUM
6834 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6835 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6836 temp = replace_equiv_address (decl_rtl,
6837 copy_rtx (XEXP (decl_rtl, 0)));
6838 }
6839
6840 /* If we got something, return it. But first, set the alignment
6841 if the address is a register. */
6842 if (temp != 0)
6843 {
6844 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6845 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6846
6847 return temp;
6848 }
6849
6850 /* If the mode of DECL_RTL does not match that of the decl, it
6851 must be a promoted value. We return a SUBREG of the wanted mode,
6852 but mark it so that we know that it was already extended. */
6853
6854 if (REG_P (decl_rtl)
6855 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6856 {
6857 enum machine_mode pmode;
6858
6859 /* Get the signedness used for this variable. Ensure we get the
6860 same mode we got when the variable was declared. */
6861 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6862 (TREE_CODE (exp) == RESULT_DECL
6863 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6864 gcc_assert (GET_MODE (decl_rtl) == pmode);
6865
6866 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6867 SUBREG_PROMOTED_VAR_P (temp) = 1;
6868 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6869 return temp;
6870 }
6871
6872 return decl_rtl;
6873
6874 case INTEGER_CST:
6875 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6876 TREE_INT_CST_HIGH (exp), mode);
6877
6878 /* ??? If overflow is set, fold will have done an incomplete job,
6879 which can result in (plus xx (const_int 0)), which can get
6880 simplified by validate_replace_rtx during virtual register
6881 instantiation, which can result in unrecognizable insns.
6882 Avoid this by forcing all overflows into registers. */
6883 if (TREE_CONSTANT_OVERFLOW (exp)
6884 && modifier != EXPAND_INITIALIZER)
6885 temp = force_reg (mode, temp);
6886
6887 return temp;
6888
6889 case VECTOR_CST:
6890 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6891 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6892 return const_vector_from_tree (exp);
6893 else
6894 return expand_expr (build_constructor_from_list
6895 (TREE_TYPE (exp),
6896 TREE_VECTOR_CST_ELTS (exp)),
6897 ignore ? const0_rtx : target, tmode, modifier);
6898
6899 case CONST_DECL:
6900 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6901
6902 case REAL_CST:
6903 /* If optimized, generate immediate CONST_DOUBLE
6904 which will be turned into memory by reload if necessary.
6905
6906 We used to force a register so that loop.c could see it. But
6907 this does not allow gen_* patterns to perform optimizations with
6908 the constants. It also produces two insns in cases like "x = 1.0;".
6909 On most machines, floating-point constants are not permitted in
6910 many insns, so we'd end up copying it to a register in any case.
6911
6912 Now, we do the copying in expand_binop, if appropriate. */
6913 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6914 TYPE_MODE (TREE_TYPE (exp)));
6915
6916 case COMPLEX_CST:
6917 /* Handle evaluating a complex constant in a CONCAT target. */
6918 if (original_target && GET_CODE (original_target) == CONCAT)
6919 {
6920 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6921 rtx rtarg, itarg;
6922
6923 rtarg = XEXP (original_target, 0);
6924 itarg = XEXP (original_target, 1);
6925
6926 /* Move the real and imaginary parts separately. */
6927 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6928 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6929
6930 if (op0 != rtarg)
6931 emit_move_insn (rtarg, op0);
6932 if (op1 != itarg)
6933 emit_move_insn (itarg, op1);
6934
6935 return original_target;
6936 }
6937
6938 /* ... fall through ... */
6939
6940 case STRING_CST:
6941 temp = expand_expr_constant (exp, 1, modifier);
6942
6943 /* temp contains a constant address.
6944 On RISC machines where a constant address isn't valid,
6945 make some insns to get that address into a register. */
6946 if (modifier != EXPAND_CONST_ADDRESS
6947 && modifier != EXPAND_INITIALIZER
6948 && modifier != EXPAND_SUM
6949 && (! memory_address_p (mode, XEXP (temp, 0))
6950 || flag_force_addr))
6951 return replace_equiv_address (temp,
6952 copy_rtx (XEXP (temp, 0)));
6953 return temp;
6954
6955 case SAVE_EXPR:
6956 {
6957 tree val = TREE_OPERAND (exp, 0);
6958 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6959
6960 if (!SAVE_EXPR_RESOLVED_P (exp))
6961 {
6962 /* We can indeed still hit this case, typically via builtin
6963 expanders calling save_expr immediately before expanding
6964 something. Assume this means that we only have to deal
6965 with non-BLKmode values. */
6966 gcc_assert (GET_MODE (ret) != BLKmode);
6967
6968 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6969 DECL_ARTIFICIAL (val) = 1;
6970 DECL_IGNORED_P (val) = 1;
6971 TREE_OPERAND (exp, 0) = val;
6972 SAVE_EXPR_RESOLVED_P (exp) = 1;
6973
6974 if (!CONSTANT_P (ret))
6975 ret = copy_to_reg (ret);
6976 SET_DECL_RTL (val, ret);
6977 }
6978
6979 return ret;
6980 }
6981
6982 case GOTO_EXPR:
6983 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6984 expand_goto (TREE_OPERAND (exp, 0));
6985 else
6986 expand_computed_goto (TREE_OPERAND (exp, 0));
6987 return const0_rtx;
6988
6989 case CONSTRUCTOR:
6990 /* If we don't need the result, just ensure we evaluate any
6991 subexpressions. */
6992 if (ignore)
6993 {
6994 unsigned HOST_WIDE_INT idx;
6995 tree value;
6996
6997 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6998 expand_expr (value, const0_rtx, VOIDmode, 0);
6999
7000 return const0_rtx;
7001 }
7002
7003 /* Try to avoid creating a temporary at all. This is possible
7004 if all of the initializer is zero.
7005 FIXME: try to handle all [0..255] initializers we can handle
7006 with memset. */
7007 else if (TREE_STATIC (exp)
7008 && !TREE_ADDRESSABLE (exp)
7009 && target != 0 && mode == BLKmode
7010 && all_zeros_p (exp))
7011 {
7012 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7013 return target;
7014 }
7015
7016 /* All elts simple constants => refer to a constant in memory. But
7017 if this is a non-BLKmode mode, let it store a field at a time
7018 since that should make a CONST_INT or CONST_DOUBLE when we
7019 fold. Likewise, if we have a target we can use, it is best to
7020 store directly into the target unless the type is large enough
7021 that memcpy will be used. If we are making an initializer and
7022 all operands are constant, put it in memory as well.
7023
7024 FIXME: Avoid trying to fill vector constructors piecemeal.
7025 Output them with output_constant_def below unless we're sure
7026 they're zeros. This should go away when vector initializers
7027 are treated like VECTOR_CST instead of arrays.
7028 */
7029 else if ((TREE_STATIC (exp)
7030 && ((mode == BLKmode
7031 && ! (target != 0 && safe_from_p (target, exp, 1)))
7032 || TREE_ADDRESSABLE (exp)
7033 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7034 && (! MOVE_BY_PIECES_P
7035 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7036 TYPE_ALIGN (type)))
7037 && ! mostly_zeros_p (exp))))
7038 || ((modifier == EXPAND_INITIALIZER
7039 || modifier == EXPAND_CONST_ADDRESS)
7040 && TREE_CONSTANT (exp)))
7041 {
7042 rtx constructor = expand_expr_constant (exp, 1, modifier);
7043
7044 if (modifier != EXPAND_CONST_ADDRESS
7045 && modifier != EXPAND_INITIALIZER
7046 && modifier != EXPAND_SUM)
7047 constructor = validize_mem (constructor);
7048
7049 return constructor;
7050 }
7051 else
7052 {
7053 /* Handle calls that pass values in multiple non-contiguous
7054 locations. The Irix 6 ABI has examples of this. */
7055 if (target == 0 || ! safe_from_p (target, exp, 1)
7056 || GET_CODE (target) == PARALLEL
7057 || modifier == EXPAND_STACK_PARM)
7058 target
7059 = assign_temp (build_qualified_type (type,
7060 (TYPE_QUALS (type)
7061 | (TREE_READONLY (exp)
7062 * TYPE_QUAL_CONST))),
7063 0, TREE_ADDRESSABLE (exp), 1);
7064
7065 store_constructor (exp, target, 0, int_expr_size (exp));
7066 return target;
7067 }
7068
7069 case MISALIGNED_INDIRECT_REF:
7070 case ALIGN_INDIRECT_REF:
7071 case INDIRECT_REF:
7072 {
7073 tree exp1 = TREE_OPERAND (exp, 0);
7074
7075 if (modifier != EXPAND_WRITE)
7076 {
7077 tree t;
7078
7079 t = fold_read_from_constant_string (exp);
7080 if (t)
7081 return expand_expr (t, target, tmode, modifier);
7082 }
7083
7084 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7085 op0 = memory_address (mode, op0);
7086
7087 if (code == ALIGN_INDIRECT_REF)
7088 {
7089 int align = TYPE_ALIGN_UNIT (type);
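/* ANDing the address with -ALIGN clears its low-order bits, rounding
   it down to an ALIGN-byte boundary so the access below is naturally
   aligned.  */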
7090 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7091 op0 = memory_address (mode, op0);
7092 }
7093
7094 temp = gen_rtx_MEM (mode, op0);
7095
7096 set_mem_attributes (temp, exp, 0);
7097
7098 /* Resolve the misalignment now, so that we don't have to remember
7099 to resolve it later. Of course, this only works for reads. */
7100 /* ??? When we get around to supporting writes, we'll have to handle
7101 this in store_expr directly. The vectorizer isn't generating
7102 those yet, however. */
7103 if (code == MISALIGNED_INDIRECT_REF)
7104 {
7105 int icode;
7106 rtx reg, insn;
7107
7108 gcc_assert (modifier == EXPAND_NORMAL
7109 || modifier == EXPAND_STACK_PARM);
7110
7111 /* The vectorizer should have already checked the mode. */
7112 icode = movmisalign_optab->handlers[mode].insn_code;
7113 gcc_assert (icode != CODE_FOR_nothing);
7114
7115 /* We've already validated the memory, and we're creating a
7116 new pseudo destination. The predicates really can't fail. */
7117 reg = gen_reg_rtx (mode);
7118
7119 /* Nor can the insn generator. */
7120 insn = GEN_FCN (icode) (reg, temp);
7121 emit_insn (insn);
7122
7123 return reg;
7124 }
7125
7126 return temp;
7127 }
7128
7129 case TARGET_MEM_REF:
7130 {
7131 struct mem_address addr;
7132
7133 get_address_description (exp, &addr);
7134 op0 = addr_for_mem_ref (&addr, true);
7135 op0 = memory_address (mode, op0);
7136 temp = gen_rtx_MEM (mode, op0);
7137 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7138 }
7139 return temp;
7140
7141 case ARRAY_REF:
7142
7143 {
7144 tree array = TREE_OPERAND (exp, 0);
7145 tree index = TREE_OPERAND (exp, 1);
7146
7147 /* Fold an expression like: "foo"[2].
7148 This is not done in fold so it won't happen inside &.
7149 Don't fold if this is for wide characters since it's too
7150 difficult to do correctly and this is a very rare case. */
7151
7152 if (modifier != EXPAND_CONST_ADDRESS
7153 && modifier != EXPAND_INITIALIZER
7154 && modifier != EXPAND_MEMORY)
7155 {
7156 tree t = fold_read_from_constant_string (exp);
7157
7158 if (t)
7159 return expand_expr (t, target, tmode, modifier);
7160 }
7161
7162 /* If this is a constant index into a constant array,
7163 just get the value from the array. Handle both the cases when
7164 we have an explicit constructor and when our operand is a variable
7165 that was declared const. */
7166
7167 if (modifier != EXPAND_CONST_ADDRESS
7168 && modifier != EXPAND_INITIALIZER
7169 && modifier != EXPAND_MEMORY
7170 && TREE_CODE (array) == CONSTRUCTOR
7171 && ! TREE_SIDE_EFFECTS (array)
7172 && TREE_CODE (index) == INTEGER_CST)
7173 {
7174 unsigned HOST_WIDE_INT ix;
7175 tree field, value;
7176
7177 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7178 field, value)
7179 if (tree_int_cst_equal (field, index))
7180 {
7181 if (!TREE_SIDE_EFFECTS (value))
7182 return expand_expr (fold (value), target, tmode, modifier);
7183 break;
7184 }
7185 }
7186
7187 else if (optimize >= 1
7188 && modifier != EXPAND_CONST_ADDRESS
7189 && modifier != EXPAND_INITIALIZER
7190 && modifier != EXPAND_MEMORY
7191 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7192 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7193 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7194 && targetm.binds_local_p (array))
7195 {
7196 if (TREE_CODE (index) == INTEGER_CST)
7197 {
7198 tree init = DECL_INITIAL (array);
7199
7200 if (TREE_CODE (init) == CONSTRUCTOR)
7201 {
7202 unsigned HOST_WIDE_INT ix;
7203 tree field, value;
7204
7205 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7206 field, value)
7207 if (tree_int_cst_equal (field, index))
7208 {
7209 if (!TREE_SIDE_EFFECTS (value))
7210 return expand_expr (fold (value), target, tmode,
7211 modifier);
7212 break;
7213 }
7214 }
7215 else if (TREE_CODE (init) == STRING_CST)
7216 {
7217 tree index1 = index;
7218 tree low_bound = array_ref_low_bound (exp);
7219 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7220
7221 /* Optimize the special-case of a zero lower bound.
7222
7223 We convert the low_bound to sizetype to avoid some problems
7224 with constant folding. (E.g. suppose the lower bound is 1,
7225 and its mode is QI. Without the conversion, (ARRAY
7226 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7227 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7228
7229 if (! integer_zerop (low_bound))
7230 index1 = size_diffop (index1, fold_convert (sizetype,
7231 low_bound));
7232
7233 if (0 > compare_tree_int (index1,
7234 TREE_STRING_LENGTH (init)))
7235 {
7236 tree type = TREE_TYPE (TREE_TYPE (init));
7237 enum machine_mode mode = TYPE_MODE (type);
7238
7239 if (GET_MODE_CLASS (mode) == MODE_INT
7240 && GET_MODE_SIZE (mode) == 1)
7241 return gen_int_mode (TREE_STRING_POINTER (init)
7242 [TREE_INT_CST_LOW (index1)],
7243 mode);
7244 }
7245 }
7246 }
7247 }
7248 }
7249 goto normal_inner_ref;
7250
7251 case COMPONENT_REF:
7252 /* If the operand is a CONSTRUCTOR, we can just extract the
7253 appropriate field if it is present. */
7254 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7255 {
7256 unsigned HOST_WIDE_INT idx;
7257 tree field, value;
7258
7259 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7260 idx, field, value)
7261 if (field == TREE_OPERAND (exp, 1)
7262 /* We can normally use the value of the field in the
7263 CONSTRUCTOR. However, if this is a bitfield in
7264 an integral mode that we can fit in a HOST_WIDE_INT,
7265 we must mask only the number of bits in the bitfield,
7266 since this is done implicitly by the constructor. If
7267 the bitfield does not meet either of those conditions,
7268 we can't do this optimization. */
7269 && (! DECL_BIT_FIELD (field)
7270 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7271 && (GET_MODE_BITSIZE (DECL_MODE (field))
7272 <= HOST_BITS_PER_WIDE_INT))))
7273 {
7274 if (DECL_BIT_FIELD (field)
7275 && modifier == EXPAND_STACK_PARM)
7276 target = 0;
7277 op0 = expand_expr (value, target, tmode, modifier);
7278 if (DECL_BIT_FIELD (field))
7279 {
7280 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7281 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7282
7283 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7284 {
7285 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7286 op0 = expand_and (imode, op0, op1, target);
7287 }
7288 else
7289 {
7290 tree count
7291 = build_int_cst (NULL_TREE,
7292 GET_MODE_BITSIZE (imode) - bitsize);
7293
7294 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7295 target, 0);
7296 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7297 target, 0);
7298 }
7299 }
7300
7301 return op0;
7302 }
7303 }
7304 goto normal_inner_ref;
7305
7306 case BIT_FIELD_REF:
7307 case ARRAY_RANGE_REF:
7308 normal_inner_ref:
7309 {
7310 enum machine_mode mode1;
7311 HOST_WIDE_INT bitsize, bitpos;
7312 tree offset;
7313 int volatilep = 0;
7314 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7315 &mode1, &unsignedp, &volatilep, true);
7316 rtx orig_op0;
7317
7318 /* If we got back the original object, something is wrong. Perhaps
7319 we are evaluating an expression too early. In any event, don't
7320 infinitely recurse. */
7321 gcc_assert (tem != exp);
7322
7323 /* If TEM's type is a union of variable size, pass TARGET to the inner
7324 computation, since it will need a temporary and TARGET is known
7325 to suffice. This occurs in unchecked conversion in Ada. */
7326
7327 orig_op0 = op0
7328 = expand_expr (tem,
7329 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7330 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7331 != INTEGER_CST)
7332 && modifier != EXPAND_STACK_PARM
7333 ? target : NULL_RTX),
7334 VOIDmode,
7335 (modifier == EXPAND_INITIALIZER
7336 || modifier == EXPAND_CONST_ADDRESS
7337 || modifier == EXPAND_STACK_PARM)
7338 ? modifier : EXPAND_NORMAL);
7339
7340 /* If this is a constant, put it into a register if it is a legitimate
7341 constant, OFFSET is 0, and we won't try to extract outside the
7342 register (in case we were passed a partially uninitialized object
7343 or a view_conversion to a larger size). Force the constant to
7344 memory otherwise. */
7345 if (CONSTANT_P (op0))
7346 {
7347 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7348 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7349 && offset == 0
7350 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7351 op0 = force_reg (mode, op0);
7352 else
7353 op0 = validize_mem (force_const_mem (mode, op0));
7354 }
7355
7356 /* Otherwise, if this object is not in memory and we either have an
7357 offset, a BLKmode result, or a reference outside the object, put it
7358 there. Such cases can occur in Ada if we have unchecked conversion
7359 of an expression from a scalar type to an array or record type or
7360 for an ARRAY_RANGE_REF whose type is BLKmode. */
7361 else if (!MEM_P (op0)
7362 && (offset != 0
7363 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7364 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7365 {
7366 tree nt = build_qualified_type (TREE_TYPE (tem),
7367 (TYPE_QUALS (TREE_TYPE (tem))
7368 | TYPE_QUAL_CONST));
7369 rtx memloc = assign_temp (nt, 1, 1, 1);
7370
7371 emit_move_insn (memloc, op0);
7372 op0 = memloc;
7373 }
7374
7375 if (offset != 0)
7376 {
7377 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7378 EXPAND_SUM);
7379
7380 gcc_assert (MEM_P (op0));
7381
7382 #ifdef POINTERS_EXTEND_UNSIGNED
7383 if (GET_MODE (offset_rtx) != Pmode)
7384 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7385 #else
7386 if (GET_MODE (offset_rtx) != ptr_mode)
7387 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7388 #endif
7389
7390 if (GET_MODE (op0) == BLKmode
7391 /* A constant address in OP0 can have VOIDmode, we must
7392 not try to call force_reg in that case. */
7393 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7394 && bitsize != 0
7395 && (bitpos % bitsize) == 0
7396 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7397 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7398 {
7399 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7400 bitpos = 0;
7401 }
7402
7403 op0 = offset_address (op0, offset_rtx,
7404 highest_pow2_factor (offset));
7405 }
7406
7407 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7408 record its alignment as BIGGEST_ALIGNMENT. */
7409 if (MEM_P (op0) && bitpos == 0 && offset != 0
7410 && is_aligning_offset (offset, tem))
7411 set_mem_align (op0, BIGGEST_ALIGNMENT);
7412
7413 /* Don't forget about volatility even if this is a bitfield. */
7414 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7415 {
7416 if (op0 == orig_op0)
7417 op0 = copy_rtx (op0);
7418
7419 MEM_VOLATILE_P (op0) = 1;
7420 }
7421
7422 /* The following code doesn't handle CONCAT.
7423 Assume only bitpos == 0 can be used for CONCAT, due to
7424 one-element arrays having the same mode as their element. */
7425 if (GET_CODE (op0) == CONCAT)
7426 {
7427 gcc_assert (bitpos == 0
7428 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7429 return op0;
7430 }
7431
7432 /* In cases where an aligned union has an unaligned object
7433 as a field, we might be extracting a BLKmode value from
7434 an integer-mode (e.g., SImode) object. Handle this case
7435 by doing the extract into an object as wide as the field
7436 (which we know to be the width of a basic mode), then
7437 storing into memory, and changing the mode to BLKmode. */
7438 if (mode1 == VOIDmode
7439 || REG_P (op0) || GET_CODE (op0) == SUBREG
7440 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7441 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7442 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7443 && modifier != EXPAND_CONST_ADDRESS
7444 && modifier != EXPAND_INITIALIZER)
7445 /* If the field isn't aligned enough to fetch as a memref,
7446 fetch it as a bit field. */
7447 || (mode1 != BLKmode
7448 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7449 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7450 || (MEM_P (op0)
7451 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7452 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7453 && ((modifier == EXPAND_CONST_ADDRESS
7454 || modifier == EXPAND_INITIALIZER)
7455 ? STRICT_ALIGNMENT
7456 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7457 || (bitpos % BITS_PER_UNIT != 0)))
7458 /* If the type and the field are a constant size and the
7459 size of the type isn't the same size as the bitfield,
7460 we must use bitfield operations. */
7461 || (bitsize >= 0
7462 && TYPE_SIZE (TREE_TYPE (exp))
7463 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7464 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7465 bitsize)))
7466 {
7467 enum machine_mode ext_mode = mode;
7468
7469 if (ext_mode == BLKmode
7470 && ! (target != 0 && MEM_P (op0)
7471 && MEM_P (target)
7472 && bitpos % BITS_PER_UNIT == 0))
7473 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7474
7475 if (ext_mode == BLKmode)
7476 {
7477 if (target == 0)
7478 target = assign_temp (type, 0, 1, 1);
7479
7480 if (bitsize == 0)
7481 return target;
7482
7483 /* In this case, BITPOS must start at a byte boundary and
7484 TARGET, if specified, must be a MEM. */
7485 gcc_assert (MEM_P (op0)
7486 && (!target || MEM_P (target))
7487 && !(bitpos % BITS_PER_UNIT));
7488
7489 emit_block_move (target,
7490 adjust_address (op0, VOIDmode,
7491 bitpos / BITS_PER_UNIT),
7492 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7493 / BITS_PER_UNIT),
7494 (modifier == EXPAND_STACK_PARM
7495 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7496
7497 return target;
7498 }
7499
7500 op0 = validize_mem (op0);
7501
7502 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7503 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7504
7505 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7506 (modifier == EXPAND_STACK_PARM
7507 ? NULL_RTX : target),
7508 ext_mode, ext_mode);
7509
7510 /* If the result is a record type and BITSIZE is narrower than
7511 the mode of OP0, an integral mode, and this is a big endian
7512 machine, we must put the field into the high-order bits. */
7513 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7514 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7515 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7516 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7517 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7518 - bitsize),
7519 op0, 1);
7520
7521 /* If the result type is BLKmode, store the data into a temporary
7522 of the appropriate type, but with the mode corresponding to the
7523 mode for the data we have (op0's mode). It's tempting to make
7524 this a constant type, since we know it's only being stored once,
7525 but that can cause problems if we are taking the address of this
7526 COMPONENT_REF because the MEM of any reference via that address
7527 will have flags corresponding to the type, which will not
7528 necessarily be constant. */
7529 if (mode == BLKmode)
7530 {
7531 rtx new
7532 = assign_stack_temp_for_type
7533 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7534
7535 emit_move_insn (new, op0);
7536 op0 = copy_rtx (new);
7537 PUT_MODE (op0, BLKmode);
7538 set_mem_attributes (op0, exp, 1);
7539 }
7540
7541 return op0;
7542 }
7543
7544 /* If the result is BLKmode, use that to access the object
7545 now as well. */
7546 if (mode == BLKmode)
7547 mode1 = BLKmode;
7548
7549 /* Get a reference to just this component. */
7550 if (modifier == EXPAND_CONST_ADDRESS
7551 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7552 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7553 else
7554 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7555
7556 if (op0 == orig_op0)
7557 op0 = copy_rtx (op0);
7558
7559 set_mem_attributes (op0, exp, 0);
7560 if (REG_P (XEXP (op0, 0)))
7561 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7562
7563 MEM_VOLATILE_P (op0) |= volatilep;
7564 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7565 || modifier == EXPAND_CONST_ADDRESS
7566 || modifier == EXPAND_INITIALIZER)
7567 return op0;
7568 else if (target == 0)
7569 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7570
7571 convert_move (target, op0, unsignedp);
7572 return target;
7573 }
7574
7575 case OBJ_TYPE_REF:
7576 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7577
7578 case CALL_EXPR:
7579 /* Check for a built-in function. */
7580 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7581 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7582 == FUNCTION_DECL)
7583 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7584 {
7585 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7586 == BUILT_IN_FRONTEND)
7587 return lang_hooks.expand_expr (exp, original_target,
7588 tmode, modifier,
7589 alt_rtl);
7590 else
7591 return expand_builtin (exp, target, subtarget, tmode, ignore);
7592 }
7593
7594 return expand_call (exp, target, ignore);
7595
7596 case NON_LVALUE_EXPR:
7597 case NOP_EXPR:
7598 case CONVERT_EXPR:
7599 if (TREE_OPERAND (exp, 0) == error_mark_node)
7600 return const0_rtx;
7601
7602 if (TREE_CODE (type) == UNION_TYPE)
7603 {
7604 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7605
7606 /* If both input and output are BLKmode, this conversion isn't doing
7607 anything except possibly changing memory attributes. */
7608 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7609 {
7610 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7611 modifier);
7612
7613 result = copy_rtx (result);
7614 set_mem_attributes (result, exp, 0);
7615 return result;
7616 }
7617
7618 if (target == 0)
7619 {
7620 if (TYPE_MODE (type) != BLKmode)
7621 target = gen_reg_rtx (TYPE_MODE (type));
7622 else
7623 target = assign_temp (type, 0, 1, 1);
7624 }
7625
7626 if (MEM_P (target))
7627 /* Store data into beginning of memory target. */
7628 store_expr (TREE_OPERAND (exp, 0),
7629 adjust_address (target, TYPE_MODE (valtype), 0),
7630 modifier == EXPAND_STACK_PARM);
7631
7632 else
7633 {
7634 gcc_assert (REG_P (target));
7635
7636 /* Store this field into a union of the proper type. */
7637 store_field (target,
7638 MIN ((int_size_in_bytes (TREE_TYPE
7639 (TREE_OPERAND (exp, 0)))
7640 * BITS_PER_UNIT),
7641 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7642 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7643 type, 0);
7644 }
7645
7646 /* Return the entire union. */
7647 return target;
7648 }
7649
7650 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7651 {
7652 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7653 modifier);
7654
7655 /* If the signedness of the conversion differs and OP0 is
7656 a promoted SUBREG, clear that indication since we now
7657 have to do the proper extension. */
7658 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7659 && GET_CODE (op0) == SUBREG)
7660 SUBREG_PROMOTED_VAR_P (op0) = 0;
7661
7662 return REDUCE_BIT_FIELD (op0);
7663 }
7664
7665 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7666 if (GET_MODE (op0) == mode)
7667 ;
7668
7669 /* If OP0 is a constant, just convert it into the proper mode. */
7670 else if (CONSTANT_P (op0))
7671 {
7672 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7673 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7674
7675 if (modifier == EXPAND_INITIALIZER)
7676 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7677 subreg_lowpart_offset (mode,
7678 inner_mode));
7679 else
7680 op0 = convert_modes (mode, inner_mode, op0,
7681 TYPE_UNSIGNED (inner_type));
7682 }
7683
7684 else if (modifier == EXPAND_INITIALIZER)
7685 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7686
7687 else if (target == 0)
7688 op0 = convert_to_mode (mode, op0,
7689 TYPE_UNSIGNED (TREE_TYPE
7690 (TREE_OPERAND (exp, 0))));
7691 else
7692 {
7693 convert_move (target, op0,
7694 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7695 op0 = target;
7696 }
7697
7698 return REDUCE_BIT_FIELD (op0);
7699
7700 case VIEW_CONVERT_EXPR:
7701 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7702
7703 /* If the input and output modes are both the same, we are done. */
7704 if (TYPE_MODE (type) == GET_MODE (op0))
7705 ;
7706 /* If neither mode is BLKmode, and both modes are the same size
7707 then we can use gen_lowpart. */
7708 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7709 && GET_MODE_SIZE (TYPE_MODE (type))
7710 == GET_MODE_SIZE (GET_MODE (op0)))
7711 {
7712 if (GET_CODE (op0) == SUBREG)
7713 op0 = force_reg (GET_MODE (op0), op0);
7714 op0 = gen_lowpart (TYPE_MODE (type), op0);
7715 }
7716 /* If both modes are integral, then we can convert from one to the
7717 other. */
7718 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7719 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7720 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7721 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7722 /* As a last resort, spill op0 to memory, and reload it in a
7723 different mode. */
7724 else if (!MEM_P (op0))
7725 {
7726 /* If the operand is not a MEM, force it into memory. Since we
7727 are going to be changing the mode of the MEM, don't call
7728 force_const_mem for constants because we don't allow pool
7729 constants to change mode. */
7730 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7731
7732 gcc_assert (!TREE_ADDRESSABLE (exp));
7733
7734 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7735 target
7736 = assign_stack_temp_for_type
7737 (TYPE_MODE (inner_type),
7738 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7739
7740 emit_move_insn (target, op0);
7741 op0 = target;
7742 }
7743
7744 /* At this point, OP0 is in the correct mode. If the output type is such
7745 that the operand is known to be aligned, indicate that it is.
7746 Otherwise, we need only be concerned about alignment for non-BLKmode
7747 results. */
7748 if (MEM_P (op0))
7749 {
7750 op0 = copy_rtx (op0);
7751
7752 if (TYPE_ALIGN_OK (type))
7753 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7754 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7755 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7756 {
7757 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7758 HOST_WIDE_INT temp_size
7759 = MAX (int_size_in_bytes (inner_type),
7760 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7761 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7762 temp_size, 0, type);
7763 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7764
7765 gcc_assert (!TREE_ADDRESSABLE (exp));
7766
7767 if (GET_MODE (op0) == BLKmode)
7768 emit_block_move (new_with_op0_mode, op0,
7769 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7770 (modifier == EXPAND_STACK_PARM
7771 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7772 else
7773 emit_move_insn (new_with_op0_mode, op0);
7774
7775 op0 = new;
7776 }
7777
7778 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7779 }
7780
7781 return op0;
7782
7783 case PLUS_EXPR:
7784 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7785 something else, make sure we add the register to the constant and
7786 then to the other thing. This case can occur during strength
7787 reduction and doing it this way will produce better code if the
7788 frame pointer or argument pointer is eliminated.
7789
7790 fold-const.c will ensure that the constant is always in the inner
7791 PLUS_EXPR, so the only case we need to do anything about is if
7792 sp, ap, or fp is our second argument, in which case we must swap
7793 the innermost first argument and our second argument. */
7794
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7796 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7797 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7798 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7799 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7800 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7801 {
7802 tree t = TREE_OPERAND (exp, 1);
7803
7804 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7805 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7806 }
7807
7808 /* If the result is to be ptr_mode and we are adding an integer to
7809 something, we might be forming a constant. So try to use
7810 plus_constant. If it produces a sum and we can't accept it,
7811 use force_operand. This allows P = &ARR[const] to generate
7812 efficient code on machines where a SYMBOL_REF is not a valid
7813 address.
7814
7815 If this is an EXPAND_SUM call, always return the sum. */
7816 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7817 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7818 {
7819 if (modifier == EXPAND_STACK_PARM)
7820 target = 0;
7821 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7822 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7823 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7824 {
7825 rtx constant_part;
7826
7827 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7828 EXPAND_SUM);
7829 /* Use immed_double_const to ensure that the constant is
7830 truncated according to the mode of OP1, then sign extended
7831 to a HOST_WIDE_INT. Using the constant directly can result
7832 in non-canonical RTL in a 64x32 cross compile. */
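/* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit target, an SImode
   constant with its sign bit set must appear as a sign-extended
   (negative) CONST_INT; using TREE_INT_CST_LOW directly could instead
   yield the zero-extended value, which is not canonical.  */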
7833 constant_part
7834 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7835 (HOST_WIDE_INT) 0,
7836 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7837 op1 = plus_constant (op1, INTVAL (constant_part));
7838 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7839 op1 = force_operand (op1, target);
7840 return REDUCE_BIT_FIELD (op1);
7841 }
7842
7843 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7844 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7845 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7846 {
7847 rtx constant_part;
7848
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7850 (modifier == EXPAND_INITIALIZER
7851 ? EXPAND_INITIALIZER : EXPAND_SUM));
7852 if (! CONSTANT_P (op0))
7853 {
7854 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7855 VOIDmode, modifier);
7856 /* Return a PLUS if modifier says it's OK. */
7857 if (modifier == EXPAND_SUM
7858 || modifier == EXPAND_INITIALIZER)
7859 return simplify_gen_binary (PLUS, mode, op0, op1);
7860 goto binop2;
7861 }
7862 /* Use immed_double_const to ensure that the constant is
7863 truncated according to the mode of OP1, then sign extended
7864 to a HOST_WIDE_INT. Using the constant directly can result
7865 in non-canonical RTL in a 64x32 cross compile. */
7866 constant_part
7867 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7868 (HOST_WIDE_INT) 0,
7869 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7870 op0 = plus_constant (op0, INTVAL (constant_part));
7871 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7872 op0 = force_operand (op0, target);
7873 return REDUCE_BIT_FIELD (op0);
7874 }
7875 }
7876
7877 /* No sense saving up arithmetic to be done
7878 if it's all in the wrong mode to form part of an address.
7879 And force_operand won't know whether to sign-extend or
7880 zero-extend. */
7881 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7882 || mode != ptr_mode)
7883 {
7884 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7885 subtarget, &op0, &op1, 0);
7886 if (op0 == const0_rtx)
7887 return op1;
7888 if (op1 == const0_rtx)
7889 return op0;
7890 goto binop2;
7891 }
7892
7893 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7894 subtarget, &op0, &op1, modifier);
7895 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7896
7897 case MINUS_EXPR:
7898 /* For initializers, we are allowed to return a MINUS of two
7899 symbolic constants. Here we handle all cases when both operands
7900 are constant. */
7901 /* Handle difference of two symbolic constants,
7902 for the sake of an initializer. */
7903 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7904 && really_constant_p (TREE_OPERAND (exp, 0))
7905 && really_constant_p (TREE_OPERAND (exp, 1)))
7906 {
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 NULL_RTX, &op0, &op1, modifier);
7909
7910 /* If the last operand is a CONST_INT, use plus_constant of
7911 the negated constant. Else make the MINUS. */
7912 if (GET_CODE (op1) == CONST_INT)
7913 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7914 else
7915 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7916 }
7917
7918 /* No sense saving up arithmetic to be done
7919 if it's all in the wrong mode to form part of an address.
7920 And force_operand won't know whether to sign-extend or
7921 zero-extend. */
7922 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7923 || mode != ptr_mode)
7924 goto binop;
7925
7926 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7927 subtarget, &op0, &op1, modifier);
7928
7929 /* Convert A - const to A + (-const). */
7930 if (GET_CODE (op1) == CONST_INT)
7931 {
7932 op1 = negate_rtx (mode, op1);
7933 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7934 }
7935
7936 goto binop2;
7937
7938 case MULT_EXPR:
7939 /* If first operand is constant, swap them.
7940 Thus the following special case checks need only
7941 check the second operand. */
7942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7943 {
7944 tree t1 = TREE_OPERAND (exp, 0);
7945 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7946 TREE_OPERAND (exp, 1) = t1;
7947 }
7948
7949 /* Attempt to return something suitable for generating an
7950 indexed address, for machines that support that. */
7951
7952 if (modifier == EXPAND_SUM && mode == ptr_mode
7953 && host_integerp (TREE_OPERAND (exp, 1), 0))
7954 {
7955 tree exp1 = TREE_OPERAND (exp, 1);
7956
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7958 EXPAND_SUM);
7959
7960 if (!REG_P (op0))
7961 op0 = force_operand (op0, NULL_RTX);
7962 if (!REG_P (op0))
7963 op0 = copy_to_mode_reg (mode, op0);
7964
7965 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7966 gen_int_mode (tree_low_cst (exp1, 0),
7967 TYPE_MODE (TREE_TYPE (exp1)))));
7968 }
7969
7970 if (modifier == EXPAND_STACK_PARM)
7971 target = 0;
7972
7973 /* Check for multiplying things that have been extended
7974 from a narrower type. If this machine supports multiplying
7975 in that narrower type with a result in the desired type,
7976 do it that way, and avoid the explicit type-conversion. */
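/* For example, (long long) i * (long long) j, with i and j of type
   int, can often be done with a single SImode-to-DImode widening
   multiply instead of extending both operands to DImode first.  */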
7977
7978 subexp0 = TREE_OPERAND (exp, 0);
7979 subexp1 = TREE_OPERAND (exp, 1);
7980 /* First, check if we have a multiplication of one signed and one
7981 unsigned operand. */
7982 if (TREE_CODE (subexp0) == NOP_EXPR
7983 && TREE_CODE (subexp1) == NOP_EXPR
7984 && TREE_CODE (type) == INTEGER_TYPE
7985 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7986 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7987 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7988 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7989 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7990 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7991 {
7992 enum machine_mode innermode
7993 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7994 this_optab = usmul_widen_optab;
7995 if (mode == GET_MODE_WIDER_MODE (innermode))
7996 {
7997 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7998 {
7999 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8000 expand_operands (TREE_OPERAND (subexp0, 0),
8001 TREE_OPERAND (subexp1, 0),
8002 NULL_RTX, &op0, &op1, 0);
8003 else
8004 expand_operands (TREE_OPERAND (subexp0, 0),
8005 TREE_OPERAND (subexp1, 0),
8006 NULL_RTX, &op1, &op0, 0);
8007
8008 goto binop3;
8009 }
8010 }
8011 }
8012 /* Check for a multiplication with matching signedness. */
8013 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8014 && TREE_CODE (type) == INTEGER_TYPE
8015 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8016 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8017 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8018 && int_fits_type_p (TREE_OPERAND (exp, 1),
8019 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8020 /* Don't use a widening multiply if a shift will do. */
8021 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8022 > HOST_BITS_PER_WIDE_INT)
8023 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8024 ||
8025 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8026 && (TYPE_PRECISION (TREE_TYPE
8027 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8028 == TYPE_PRECISION (TREE_TYPE
8029 (TREE_OPERAND
8030 (TREE_OPERAND (exp, 0), 0))))
8031 /* If both operands are extended, they must either both
8032 be zero-extended or both be sign-extended. */
8033 && (TYPE_UNSIGNED (TREE_TYPE
8034 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8035 == TYPE_UNSIGNED (TREE_TYPE
8036 (TREE_OPERAND
8037 (TREE_OPERAND (exp, 0), 0)))))))
8038 {
8039 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8040 enum machine_mode innermode = TYPE_MODE (op0type);
8041 bool zextend_p = TYPE_UNSIGNED (op0type);
8042 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8043 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8044
8045 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8046 {
8047 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8048 {
8049 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8050 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8051 TREE_OPERAND (exp, 1),
8052 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8053 else
8054 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8055 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8056 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8057 goto binop3;
8058 }
8059 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8060 && innermode == word_mode)
8061 {
8062 rtx htem, hipart;
8063 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8064 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8065 op1 = convert_modes (innermode, mode,
8066 expand_normal (TREE_OPERAND (exp, 1)),
8067 unsignedp);
8068 else
8069 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8070 temp = expand_binop (mode, other_optab, op0, op1, target,
8071 unsignedp, OPTAB_LIB_WIDEN);
8072 hipart = gen_highpart (innermode, temp);
8073 htem = expand_mult_highpart_adjust (innermode, hipart,
8074 op0, op1, hipart,
8075 zextend_p);
8076 if (htem != hipart)
8077 emit_move_insn (hipart, htem);
8078 return REDUCE_BIT_FIELD (temp);
8079 }
8080 }
8081 }
8082 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8083 subtarget, &op0, &op1, 0);
8084 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8085
8086 case TRUNC_DIV_EXPR:
8087 case FLOOR_DIV_EXPR:
8088 case CEIL_DIV_EXPR:
8089 case ROUND_DIV_EXPR:
8090 case EXACT_DIV_EXPR:
8091 if (modifier == EXPAND_STACK_PARM)
8092 target = 0;
8093 /* Possible optimization: compute the dividend with EXPAND_SUM
8094 then, if the divisor is constant, we can optimize the case
8095 where some terms of the dividend have coeffs divisible by it. */
8096 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8097 subtarget, &op0, &op1, 0);
8098 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8099
8100 case RDIV_EXPR:
8101 goto binop;
8102
8103 case TRUNC_MOD_EXPR:
8104 case FLOOR_MOD_EXPR:
8105 case CEIL_MOD_EXPR:
8106 case ROUND_MOD_EXPR:
8107 if (modifier == EXPAND_STACK_PARM)
8108 target = 0;
8109 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8110 subtarget, &op0, &op1, 0);
8111 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8112
8113 case FIX_ROUND_EXPR:
8114 case FIX_FLOOR_EXPR:
8115 case FIX_CEIL_EXPR:
8116 gcc_unreachable (); /* Not used for C. */
8117
8118 case FIX_TRUNC_EXPR:
8119 op0 = expand_normal (TREE_OPERAND (exp, 0));
8120 if (target == 0 || modifier == EXPAND_STACK_PARM)
8121 target = gen_reg_rtx (mode);
8122 expand_fix (target, op0, unsignedp);
8123 return target;
8124
8125 case FLOAT_EXPR:
8126 op0 = expand_normal (TREE_OPERAND (exp, 0));
8127 if (target == 0 || modifier == EXPAND_STACK_PARM)
8128 target = gen_reg_rtx (mode);
8129 /* expand_float can't figure out what to do if FROM has VOIDmode.
8130 So give it the correct mode. With -O, cse will optimize this. */
8131 if (GET_MODE (op0) == VOIDmode)
8132 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8133 op0);
8134 expand_float (target, op0,
8135 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8136 return target;
8137
8138 case NEGATE_EXPR:
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8140 if (modifier == EXPAND_STACK_PARM)
8141 target = 0;
8142 temp = expand_unop (mode,
8143 optab_for_tree_code (NEGATE_EXPR, type),
8144 op0, target, 0);
8145 gcc_assert (temp);
8146 return REDUCE_BIT_FIELD (temp);
8147
8148 case ABS_EXPR:
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8150 if (modifier == EXPAND_STACK_PARM)
8151 target = 0;
8152
8153 /* ABS_EXPR is not valid for complex arguments. */
8154 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8155 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8156
8157 /* Unsigned abs is simply the operand. Testing here means we don't
8158 risk generating incorrect code below. */
8159 if (TYPE_UNSIGNED (type))
8160 return op0;
8161
8162 return expand_abs (mode, op0, target, unsignedp,
8163 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8164
8165 case MAX_EXPR:
8166 case MIN_EXPR:
8167 target = original_target;
8168 if (target == 0
8169 || modifier == EXPAND_STACK_PARM
8170 || (MEM_P (target) && MEM_VOLATILE_P (target))
8171 || GET_MODE (target) != mode
8172 || (REG_P (target)
8173 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8174 target = gen_reg_rtx (mode);
8175 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8176 target, &op0, &op1, 0);
8177
8178 /* First try to do it with a special MIN or MAX instruction.
8179 If that does not win, use a conditional jump to select the proper
8180 value. */
8181 this_optab = optab_for_tree_code (code, type);
8182 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8183 OPTAB_WIDEN);
8184 if (temp != 0)
8185 return temp;
8186
8187 /* At this point, a MEM target is no longer useful; we will get better
8188 code without it. */
8189
8190 if (! REG_P (target))
8191 target = gen_reg_rtx (mode);
8192
8193 /* If op1 was placed in target, swap op0 and op1. */
8194 if (target != op0 && target == op1)
8195 {
8196 temp = op0;
8197 op0 = op1;
8198 op1 = temp;
8199 }
8200
8201 /* We generate better code and avoid problems with op1 mentioning
8202 target by forcing op1 into a pseudo if it isn't a constant. */
8203 if (! CONSTANT_P (op1))
8204 op1 = force_reg (mode, op1);
8205
8206 {
8207 enum rtx_code comparison_code;
8208 rtx cmpop1 = op1;
8209
8210 if (code == MAX_EXPR)
8211 comparison_code = unsignedp ? GEU : GE;
8212 else
8213 comparison_code = unsignedp ? LEU : LE;
8214
8215 /* Canonicalize to comparisons against 0. */
8216 if (op1 == const1_rtx)
8217 {
8218 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8219 or (a != 0 ? a : 1) for unsigned.
8220 For MIN we are safe converting (a <= 1 ? a : 1)
8221 into (a <= 0 ? a : 1) */
8222 cmpop1 = const0_rtx;
8223 if (code == MAX_EXPR)
8224 comparison_code = unsignedp ? NE : GT;
8225 }
8226 if (op1 == constm1_rtx && !unsignedp)
8227 {
8228 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8229 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8230 cmpop1 = const0_rtx;
8231 if (code == MIN_EXPR)
8232 comparison_code = LT;
8233 }
8234 #ifdef HAVE_conditional_move
8235 /* Use a conditional move if possible. */
8236 if (can_conditionally_move_p (mode))
8237 {
8238 rtx insn;
8239
8240 /* ??? Same problem as in expmed.c: emit_conditional_move
8241 forces a stack adjustment via compare_from_rtx, and we
8242 lose the stack adjustment if the sequence we are about
8243 to create is discarded. */
8244 do_pending_stack_adjust ();
8245
8246 start_sequence ();
8247
8248 /* Try to emit the conditional move. */
8249 insn = emit_conditional_move (target, comparison_code,
8250 op0, cmpop1, mode,
8251 op0, op1, mode,
8252 unsignedp);
8253
8254 /* If we could do the conditional move, emit the sequence,
8255 and return. */
8256 if (insn)
8257 {
8258 rtx seq = get_insns ();
8259 end_sequence ();
8260 emit_insn (seq);
8261 return target;
8262 }
8263
8264 /* Otherwise discard the sequence and fall back to code with
8265 branches. */
8266 end_sequence ();
8267 }
8268 #endif
8269 if (target != op0)
8270 emit_move_insn (target, op0);
8271
8272 temp = gen_label_rtx ();
8273 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8274 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8275 }
8276 emit_move_insn (target, op1);
8277 emit_label (temp);
8278 return target;
8279
8280 case BIT_NOT_EXPR:
8281 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8282 if (modifier == EXPAND_STACK_PARM)
8283 target = 0;
8284 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8285 gcc_assert (temp);
8286 return temp;
8287
8288 /* ??? Can optimize bitwise operations with one arg constant.
8289 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8290 and (a bitwise1 b) bitwise2 b (etc)
8291 but that is probably not worthwhile. */
8292
8293 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8294 boolean values when we want in all cases to compute both of them. In
8295 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8296 as actual zero-or-1 values and then bitwise anding. In cases where
8297 there cannot be any side effects, better code would be made by
8298 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8299 how to recognize those cases. */
8300
8301 case TRUTH_AND_EXPR:
8302 code = BIT_AND_EXPR;
8303 case BIT_AND_EXPR:
8304 goto binop;
8305
8306 case TRUTH_OR_EXPR:
8307 code = BIT_IOR_EXPR;
8308 case BIT_IOR_EXPR:
8309 goto binop;
8310
8311 case TRUTH_XOR_EXPR:
8312 code = BIT_XOR_EXPR;
8313 case BIT_XOR_EXPR:
8314 goto binop;
8315
8316 case LSHIFT_EXPR:
8317 case RSHIFT_EXPR:
8318 case LROTATE_EXPR:
8319 case RROTATE_EXPR:
8320 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8321 subtarget = 0;
8322 if (modifier == EXPAND_STACK_PARM)
8323 target = 0;
8324 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8325 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8326 unsignedp);
8327
8328 /* Could determine the answer when only additive constants differ. Also,
8329 the addition of one can be handled by changing the condition. */
8330 case LT_EXPR:
8331 case LE_EXPR:
8332 case GT_EXPR:
8333 case GE_EXPR:
8334 case EQ_EXPR:
8335 case NE_EXPR:
8336 case UNORDERED_EXPR:
8337 case ORDERED_EXPR:
8338 case UNLT_EXPR:
8339 case UNLE_EXPR:
8340 case UNGT_EXPR:
8341 case UNGE_EXPR:
8342 case UNEQ_EXPR:
8343 case LTGT_EXPR:
8344 temp = do_store_flag (exp,
8345 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8346 tmode != VOIDmode ? tmode : mode, 0);
8347 if (temp != 0)
8348 return temp;
8349
8350 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8351 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8352 && original_target
8353 && REG_P (original_target)
8354 && (GET_MODE (original_target)
8355 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8356 {
8357 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8358 VOIDmode, 0);
8359
8360 /* If temp is constant, we can just compute the result. */
8361 if (GET_CODE (temp) == CONST_INT)
8362 {
8363 if (INTVAL (temp) != 0)
8364 emit_move_insn (target, const1_rtx);
8365 else
8366 emit_move_insn (target, const0_rtx);
8367
8368 return target;
8369 }
8370
8371 if (temp != original_target)
8372 {
8373 enum machine_mode mode1 = GET_MODE (temp);
8374 if (mode1 == VOIDmode)
8375 mode1 = tmode != VOIDmode ? tmode : mode;
8376
8377 temp = copy_to_mode_reg (mode1, temp);
8378 }
8379
8380 op1 = gen_label_rtx ();
8381 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8382 GET_MODE (temp), unsignedp, op1);
8383 emit_move_insn (temp, const1_rtx);
8384 emit_label (op1);
8385 return temp;
8386 }
8387
8388 /* If no set-flag instruction, must generate a conditional store
8389 into a temporary variable. Drop through and handle this
8390 like && and ||. */
8391
8392 if (! ignore
8393 && (target == 0
8394 || modifier == EXPAND_STACK_PARM
8395 || ! safe_from_p (target, exp, 1)
8396 /* Make sure we don't have a hard reg (such as function's return
8397 value) live across basic blocks, if not optimizing. */
8398 || (!optimize && REG_P (target)
8399 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8400 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8401
8402 if (target)
8403 emit_move_insn (target, const0_rtx);
8404
8405 op1 = gen_label_rtx ();
8406 jumpifnot (exp, op1);
8407
8408 if (target)
8409 emit_move_insn (target, const1_rtx);
8410
8411 emit_label (op1);
8412 return ignore ? const0_rtx : target;
8413
8414 case TRUTH_NOT_EXPR:
8415 if (modifier == EXPAND_STACK_PARM)
8416 target = 0;
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8418 /* The parser is careful to generate TRUTH_NOT_EXPR
8419 only with operands that are always zero or one. */
8420 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8421 target, 1, OPTAB_LIB_WIDEN);
8422 gcc_assert (temp);
8423 return temp;
8424
8425 case STATEMENT_LIST:
8426 {
8427 tree_stmt_iterator iter;
8428
8429 gcc_assert (ignore);
8430
8431 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8432 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8433 }
8434 return const0_rtx;
8435
8436 case COND_EXPR:
8437 /* A COND_EXPR with its type being VOID_TYPE represents a
8438 conditional jump and is handled in
8439 expand_gimple_cond_expr. */
8440 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8441
8442 /* Note that COND_EXPRs whose type is a structure or union
8443 are required to be constructed to contain assignments of
8444 a temporary variable, so that we can evaluate them here
8445 for side effect only. If type is void, we must do likewise. */
8446
8447 gcc_assert (!TREE_ADDRESSABLE (type)
8448 && !ignore
8449 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8450 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8451
8452 /* If we are not to produce a result, we have no target. Otherwise,
8453 if a target was specified use it; it will not be used as an
8454 intermediate target unless it is safe. If no target, use a
8455 temporary. */
8456
8457 if (modifier != EXPAND_STACK_PARM
8458 && original_target
8459 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8460 && GET_MODE (original_target) == mode
8461 #ifdef HAVE_conditional_move
8462 && (! can_conditionally_move_p (mode)
8463 || REG_P (original_target))
8464 #endif
8465 && !MEM_P (original_target))
8466 temp = original_target;
8467 else
8468 temp = assign_temp (type, 0, 0, 1);
8469
8470 do_pending_stack_adjust ();
8471 NO_DEFER_POP;
8472 op0 = gen_label_rtx ();
8473 op1 = gen_label_rtx ();
8474 jumpifnot (TREE_OPERAND (exp, 0), op0);
8475 store_expr (TREE_OPERAND (exp, 1), temp,
8476 modifier == EXPAND_STACK_PARM);
8477
8478 emit_jump_insn (gen_jump (op1));
8479 emit_barrier ();
8480 emit_label (op0);
8481 store_expr (TREE_OPERAND (exp, 2), temp,
8482 modifier == EXPAND_STACK_PARM);
8483
8484 emit_label (op1);
8485 OK_DEFER_POP;
8486 return temp;
8487
8488 case VEC_COND_EXPR:
8489 target = expand_vec_cond_expr (exp, target);
8490 return target;
8491
8492 case MODIFY_EXPR:
8493 {
8494 tree lhs = TREE_OPERAND (exp, 0);
8495 tree rhs = TREE_OPERAND (exp, 1);
8496
8497 gcc_assert (ignore);
8498
8499 /* Check for |= or &= of a bitfield of size one into another bitfield
8500 of size 1. In this case, (unless we need the result of the
8501 assignment) we can do this more efficiently with a
8502 test followed by an assignment, if necessary.
8503
8504 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8505 things change so we do, this code should be enhanced to
8506 support it. */
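/* E.g. for "x.a |= y.b" we branch on y.b and store the constant 1
   into x.a only when y.b is set; for &= we store the constant 0
   only when the tested bit is clear.  */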
8507 if (TREE_CODE (lhs) == COMPONENT_REF
8508 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8509 || TREE_CODE (rhs) == BIT_AND_EXPR)
8510 && TREE_OPERAND (rhs, 0) == lhs
8511 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8512 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8513 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8514 {
8515 rtx label = gen_label_rtx ();
8516 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8517 do_jump (TREE_OPERAND (rhs, 1),
8518 value ? label : 0,
8519 value ? 0 : label);
8520 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8521 do_pending_stack_adjust ();
8522 emit_label (label);
8523 return const0_rtx;
8524 }
8525
8526 expand_assignment (lhs, rhs);
8527
8528 return const0_rtx;
8529 }
8530
8531 case RETURN_EXPR:
8532 if (!TREE_OPERAND (exp, 0))
8533 expand_null_return ();
8534 else
8535 expand_return (TREE_OPERAND (exp, 0));
8536 return const0_rtx;
8537
8538 case ADDR_EXPR:
8539 return expand_expr_addr_expr (exp, target, tmode, modifier);
8540
8541 case COMPLEX_EXPR:
8542 /* Get the rtx code of the operands. */
8543 op0 = expand_normal (TREE_OPERAND (exp, 0));
8544 op1 = expand_normal (TREE_OPERAND (exp, 1));
8545
8546 if (!target)
8547 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8548
8549 /* Move the real (op0) and imaginary (op1) parts to their location. */
8550 write_complex_part (target, op0, false);
8551 write_complex_part (target, op1, true);
8552
8553 return target;
8554
8555 case REALPART_EXPR:
8556 op0 = expand_normal (TREE_OPERAND (exp, 0));
8557 return read_complex_part (op0, false);
8558
8559 case IMAGPART_EXPR:
8560 op0 = expand_normal (TREE_OPERAND (exp, 0));
8561 return read_complex_part (op0, true);
8562
8563 case RESX_EXPR:
8564 expand_resx_expr (exp);
8565 return const0_rtx;
8566
8567 case TRY_CATCH_EXPR:
8568 case CATCH_EXPR:
8569 case EH_FILTER_EXPR:
8570 case TRY_FINALLY_EXPR:
8571 /* Lowered by tree-eh.c. */
8572 gcc_unreachable ();
8573
8574 case WITH_CLEANUP_EXPR:
8575 case CLEANUP_POINT_EXPR:
8576 case TARGET_EXPR:
8577 case CASE_LABEL_EXPR:
8578 case VA_ARG_EXPR:
8579 case BIND_EXPR:
8580 case INIT_EXPR:
8581 case CONJ_EXPR:
8582 case COMPOUND_EXPR:
8583 case PREINCREMENT_EXPR:
8584 case PREDECREMENT_EXPR:
8585 case POSTINCREMENT_EXPR:
8586 case POSTDECREMENT_EXPR:
8587 case LOOP_EXPR:
8588 case EXIT_EXPR:
8589 case TRUTH_ANDIF_EXPR:
8590 case TRUTH_ORIF_EXPR:
8591 /* Lowered by gimplify.c. */
8592 gcc_unreachable ();
8593
8594 case EXC_PTR_EXPR:
8595 return get_exception_pointer (cfun);
8596
8597 case FILTER_EXPR:
8598 return get_exception_filter (cfun);
8599
8600 case FDESC_EXPR:
8601 /* Function descriptors are not valid except for as
8602 initialization constants, and should not be expanded. */
8603 gcc_unreachable ();
8604
8605 case SWITCH_EXPR:
8606 expand_case (exp);
8607 return const0_rtx;
8608
8609 case LABEL_EXPR:
8610 expand_label (TREE_OPERAND (exp, 0));
8611 return const0_rtx;
8612
8613 case ASM_EXPR:
8614 expand_asm_expr (exp);
8615 return const0_rtx;
8616
8617 case WITH_SIZE_EXPR:
8618 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8619 have pulled out the size to use in whatever context it needed. */
8620 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8621 modifier, alt_rtl);
8622
8623 case REALIGN_LOAD_EXPR:
8624 {
8625 tree oprnd0 = TREE_OPERAND (exp, 0);
8626 tree oprnd1 = TREE_OPERAND (exp, 1);
8627 tree oprnd2 = TREE_OPERAND (exp, 2);
8628 rtx op2;
8629
8630 this_optab = optab_for_tree_code (code, type);
8631 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8632 op2 = expand_normal (oprnd2);
8633 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8634 target, unsignedp);
8635 gcc_assert (temp);
8636 return temp;
8637 }
8638
8639 case DOT_PROD_EXPR:
8640 {
8641 tree oprnd0 = TREE_OPERAND (exp, 0);
8642 tree oprnd1 = TREE_OPERAND (exp, 1);
8643 tree oprnd2 = TREE_OPERAND (exp, 2);
8644 rtx op2;
8645
8646 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8647 op2 = expand_normal (oprnd2);
8648 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8649 target, unsignedp);
8650 return target;
8651 }
8652
8653 case WIDEN_SUM_EXPR:
8654 {
8655 tree oprnd0 = TREE_OPERAND (exp, 0);
8656 tree oprnd1 = TREE_OPERAND (exp, 1);
8657
8658 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8659 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8660 target, unsignedp);
8661 return target;
8662 }
8663
8664 case REDUC_MAX_EXPR:
8665 case REDUC_MIN_EXPR:
8666 case REDUC_PLUS_EXPR:
8667 {
8668 op0 = expand_normal (TREE_OPERAND (exp, 0));
8669 this_optab = optab_for_tree_code (code, type);
8670 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8671 gcc_assert (temp);
8672 return temp;
8673 }
8674
8675 case VEC_LSHIFT_EXPR:
8676 case VEC_RSHIFT_EXPR:
8677 {
8678 target = expand_vec_shift_expr (exp, target);
8679 return target;
8680 }
8681
8682 default:
8683 return lang_hooks.expand_expr (exp, original_target, tmode,
8684 modifier, alt_rtl);
8685 }
8686
8687 /* Here to do an ordinary binary operator. */
8688 binop:
8689 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8690 subtarget, &op0, &op1, 0);
8691 binop2:
8692 this_optab = optab_for_tree_code (code, type);
8693 binop3:
8694 if (modifier == EXPAND_STACK_PARM)
8695 target = 0;
8696 temp = expand_binop (mode, this_optab, op0, op1, target,
8697 unsignedp, OPTAB_LIB_WIDEN);
8698 gcc_assert (temp);
8699 return REDUCE_BIT_FIELD (temp);
8700 }
8701 #undef REDUCE_BIT_FIELD
8702 \f
8703 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8704 signedness of TYPE), possibly returning the result in TARGET. */
8705 static rtx
8706 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8707 {
8708 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8709 if (target && GET_MODE (target) != GET_MODE (exp))
8710 target = 0;
8711 if (TYPE_UNSIGNED (type))
8712 {
8713 rtx mask;
8714 if (prec < HOST_BITS_PER_WIDE_INT)
8715 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8716 GET_MODE (exp));
8717 else
8718 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8719 ((unsigned HOST_WIDE_INT) 1
8720 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8721 GET_MODE (exp));
8722 return expand_and (GET_MODE (exp), exp, mask, target);
8723 }
8724 else
8725 {
8726 tree count = build_int_cst (NULL_TREE,
8727 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8728 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8729 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8730 }
8731 }
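
/* An illustrative sketch of the reduction above in plain C rather than RTL;
   the helper below is hypothetical and assumes PREC is smaller than the bit
   width of long.  An unsigned value is reduced with a mask, a signed value
   with a left/right shift pair that also sign-extends the result (relying,
   as GCC does, on arithmetic right shifts of signed values).  */
#if 0
static long
reduce_precision_sketch (long value, int prec, int is_unsigned)
{
  int shift = (int) (sizeof (long) * 8) - prec;
  if (is_unsigned)
    return (long) ((unsigned long) value & (((unsigned long) 1 << prec) - 1));
  return (value << shift) >> shift;	/* shift pair sign-extends */
}
#endif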
8732 \f
8733 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8734 when applied to the address of EXP, produces an address known to be
8735 aligned to more than BIGGEST_ALIGNMENT. */
8736
8737 static int
8738 is_aligning_offset (tree offset, tree exp)
8739 {
8740 /* Strip off any conversions. */
8741 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8742 || TREE_CODE (offset) == NOP_EXPR
8743 || TREE_CODE (offset) == CONVERT_EXPR)
8744 offset = TREE_OPERAND (offset, 0);
8745
8746 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8747 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8748 if (TREE_CODE (offset) != BIT_AND_EXPR
8749 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8750 || compare_tree_int (TREE_OPERAND (offset, 1),
8751 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8752 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8753 return 0;
8754
8755 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8756 It must be NEGATE_EXPR. Then strip any more conversions. */
8757 offset = TREE_OPERAND (offset, 0);
8758 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8759 || TREE_CODE (offset) == NOP_EXPR
8760 || TREE_CODE (offset) == CONVERT_EXPR)
8761 offset = TREE_OPERAND (offset, 0);
8762
8763 if (TREE_CODE (offset) != NEGATE_EXPR)
8764 return 0;
8765
8766 offset = TREE_OPERAND (offset, 0);
8767 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8768 || TREE_CODE (offset) == NOP_EXPR
8769 || TREE_CODE (offset) == CONVERT_EXPR)
8770 offset = TREE_OPERAND (offset, 0);
8771
8772 /* This must now be the address of EXP. */
8773 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8774 }
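
/* The source-level idiom the test above looks for, sketched in plain C with
   hypothetical names: the offset is "(-&exp) & (align - 1)", so adding it
   to &exp rounds the address up to a multiple of ALIGN, where ALIGN is
   assumed to exceed BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
#if 0
#include <stdint.h>
#define ALIGN 64
static char *
aligning_offset_sketch (char *exp)
{
  uintptr_t offset = (0 - (uintptr_t) exp) & (ALIGN - 1);
  return exp + offset;		/* now a multiple of ALIGN */
}
#endif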
8775 \f
8776 /* Return the tree node if ARG corresponds to a string constant, or zero
8777 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8778 in bytes within the string that ARG is accessing. The type of the
8779 offset will be `sizetype'. */
8780
8781 tree
8782 string_constant (tree arg, tree *ptr_offset)
8783 {
8784 tree array, offset;
8785 STRIP_NOPS (arg);
8786
8787 if (TREE_CODE (arg) == ADDR_EXPR)
8788 {
8789 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8790 {
8791 *ptr_offset = size_zero_node;
8792 return TREE_OPERAND (arg, 0);
8793 }
8794 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8795 {
8796 array = TREE_OPERAND (arg, 0);
8797 offset = size_zero_node;
8798 }
8799 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8800 {
8801 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8802 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8803 if (TREE_CODE (array) != STRING_CST
8804 && TREE_CODE (array) != VAR_DECL)
8805 return 0;
8806 }
8807 else
8808 return 0;
8809 }
8810 else if (TREE_CODE (arg) == PLUS_EXPR)
8811 {
8812 tree arg0 = TREE_OPERAND (arg, 0);
8813 tree arg1 = TREE_OPERAND (arg, 1);
8814
8815 STRIP_NOPS (arg0);
8816 STRIP_NOPS (arg1);
8817
8818 if (TREE_CODE (arg0) == ADDR_EXPR
8819 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8820 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8821 {
8822 array = TREE_OPERAND (arg0, 0);
8823 offset = arg1;
8824 }
8825 else if (TREE_CODE (arg1) == ADDR_EXPR
8826 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8827 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8828 {
8829 array = TREE_OPERAND (arg1, 0);
8830 offset = arg0;
8831 }
8832 else
8833 return 0;
8834 }
8835 else
8836 return 0;
8837
8838 if (TREE_CODE (array) == STRING_CST)
8839 {
8840 *ptr_offset = fold_convert (sizetype, offset);
8841 return array;
8842 }
8843 else if (TREE_CODE (array) == VAR_DECL)
8844 {
8845 int length;
8846
8847 /* Variables initialized to string literals can be handled too. */
8848 if (DECL_INITIAL (array) == NULL_TREE
8849 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8850 return 0;
8851
8852 /* Only handle them if they are read-only, non-volatile and bind locally. */
8853 if (! TREE_READONLY (array)
8854 || TREE_SIDE_EFFECTS (array)
8855 || ! targetm.binds_local_p (array))
8856 return 0;
8857
8858 /* Avoid const char foo[4] = "abcde"; */
8859 if (DECL_SIZE_UNIT (array) == NULL_TREE
8860 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8861 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8862 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8863 return 0;
8864
8865 /* If the variable is bigger than the string literal, OFFSET must be constant
8866 and within the bounds of the string literal. */
8867 offset = fold_convert (sizetype, offset);
8868 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8869 && (! host_integerp (offset, 1)
8870 || compare_tree_int (offset, length) >= 0))
8871 return 0;
8872
8873 *ptr_offset = offset;
8874 return DECL_INITIAL (array);
8875 }
8876
8877 return 0;
8878 }
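
/* A sketch of how a caller typically uses the routine above; the helper
   name is hypothetical, but builtin expanders resolve their string
   arguments to the literal bytes in essentially this way.  */
#if 0
static const char *
string_constant_user_sketch (tree arg, unsigned HOST_WIDE_INT *off)
{
  tree offset_node;
  tree str = string_constant (arg, &offset_node);
  if (str == 0 || !host_integerp (offset_node, 1))
    return 0;
  *off = tree_low_cst (offset_node, 1);
  /* ARG points *OFF bytes into the literal, which holds
     TREE_STRING_LENGTH (str) bytes.  */
  return TREE_STRING_POINTER (str);
}
#endif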
8879 \f
8880 /* Generate code to calculate EXP using a store-flag instruction
8881 and return an rtx for the result. EXP is either a comparison
8882 or a TRUTH_NOT_EXPR whose operand is a comparison.
8883
8884 If TARGET is nonzero, store the result there if convenient.
8885
8886 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8887 cheap.
8888
8889 Return zero if there is no suitable set-flag instruction
8890 available on this machine.
8891
8892 Once expand_expr has been called on the arguments of the comparison,
8893 we are committed to doing the store flag, since it is not safe to
8894 re-evaluate the expression. We emit the store-flag insn by calling
8895 emit_store_flag, but only expand the arguments if we have a reason
8896 to believe that emit_store_flag will be successful. If we think that
8897 it will, but it isn't, we have to simulate the store-flag with a
8898 set/jump/set sequence. */
8899
8900 static rtx
8901 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8902 {
8903 enum rtx_code code;
8904 tree arg0, arg1, type;
8905 tree tem;
8906 enum machine_mode operand_mode;
8907 int invert = 0;
8908 int unsignedp;
8909 rtx op0, op1;
8910 enum insn_code icode;
8911 rtx subtarget = target;
8912 rtx result, label;
8913
8914 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8915 result at the end. We can't simply invert the test since it would
8916 have already been inverted if it were valid. This case occurs for
8917 some floating-point comparisons. */
8918
8919 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8920 invert = 1, exp = TREE_OPERAND (exp, 0);
8921
8922 arg0 = TREE_OPERAND (exp, 0);
8923 arg1 = TREE_OPERAND (exp, 1);
8924
8925 /* Don't crash if the comparison was erroneous. */
8926 if (arg0 == error_mark_node || arg1 == error_mark_node)
8927 return const0_rtx;
8928
8929 type = TREE_TYPE (arg0);
8930 operand_mode = TYPE_MODE (type);
8931 unsignedp = TYPE_UNSIGNED (type);
8932
8933 /* We won't bother with BLKmode store-flag operations because it would mean
8934 passing a lot of information to emit_store_flag. */
8935 if (operand_mode == BLKmode)
8936 return 0;
8937
8938 /* We won't bother with store-flag operations involving function pointers
8939 when function pointers must be canonicalized before comparisons. */
8940 #ifdef HAVE_canonicalize_funcptr_for_compare
8941 if (HAVE_canonicalize_funcptr_for_compare
8942 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8943 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8944 == FUNCTION_TYPE))
8945 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8946 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8947 == FUNCTION_TYPE))))
8948 return 0;
8949 #endif
8950
8951 STRIP_NOPS (arg0);
8952 STRIP_NOPS (arg1);
8953
8954 /* Get the rtx comparison code to use. We know that EXP is a comparison
8955 operation of some type. Some comparisons against 1 and -1 can be
8956 converted to comparisons with zero. Do so here so that the tests
8957 below will be aware that we have a comparison with zero. These
8958 tests will not catch constants in the first operand, but constants
8959 are rarely passed as the first operand. */
8960
8961 switch (TREE_CODE (exp))
8962 {
8963 case EQ_EXPR:
8964 code = EQ;
8965 break;
8966 case NE_EXPR:
8967 code = NE;
8968 break;
8969 case LT_EXPR:
8970 if (integer_onep (arg1))
8971 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8972 else
8973 code = unsignedp ? LTU : LT;
8974 break;
8975 case LE_EXPR:
8976 if (! unsignedp && integer_all_onesp (arg1))
8977 arg1 = integer_zero_node, code = LT;
8978 else
8979 code = unsignedp ? LEU : LE;
8980 break;
8981 case GT_EXPR:
8982 if (! unsignedp && integer_all_onesp (arg1))
8983 arg1 = integer_zero_node, code = GE;
8984 else
8985 code = unsignedp ? GTU : GT;
8986 break;
8987 case GE_EXPR:
8988 if (integer_onep (arg1))
8989 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8990 else
8991 code = unsignedp ? GEU : GE;
8992 break;
8993
8994 case UNORDERED_EXPR:
8995 code = UNORDERED;
8996 break;
8997 case ORDERED_EXPR:
8998 code = ORDERED;
8999 break;
9000 case UNLT_EXPR:
9001 code = UNLT;
9002 break;
9003 case UNLE_EXPR:
9004 code = UNLE;
9005 break;
9006 case UNGT_EXPR:
9007 code = UNGT;
9008 break;
9009 case UNGE_EXPR:
9010 code = UNGE;
9011 break;
9012 case UNEQ_EXPR:
9013 code = UNEQ;
9014 break;
9015 case LTGT_EXPR:
9016 code = LTGT;
9017 break;
9018
9019 default:
9020 gcc_unreachable ();
9021 }
9022
9023 /* Put a constant second. */
9024 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9025 {
9026 tem = arg0; arg0 = arg1; arg1 = tem;
9027 code = swap_condition (code);
9028 }
9029
9030 /* If this is an equality or inequality test of a single bit, we can
9031 do this by shifting the bit being tested to the low-order bit and
9032 masking the result with the constant 1. If the condition was EQ,
9033 we xor it with 1. This does not require an scc insn and is faster
9034 than an scc insn even if we have it.
9035
9036 The code to make this transformation was moved into fold_single_bit_test,
9037 so we just call into the folder and expand its result. */
9038
9039 if ((code == NE || code == EQ)
9040 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9041 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9042 {
9043 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9044 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9045 arg0, arg1, type),
9046 target, VOIDmode, EXPAND_NORMAL);
9047 }
9048
9049 /* Now see if we are likely to be able to do this. Return if not. */
9050 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9051 return 0;
9052
9053 icode = setcc_gen_code[(int) code];
9054 if (icode == CODE_FOR_nothing
9055 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9056 {
9057 /* We can only do this if it is one of the special cases that
9058 can be handled without an scc insn. */
9059 if ((code == LT && integer_zerop (arg1))
9060 || (! only_cheap && code == GE && integer_zerop (arg1)))
9061 ;
9062 else if (! only_cheap && (code == NE || code == EQ)
9063 && TREE_CODE (type) != REAL_TYPE
9064 && ((abs_optab->handlers[(int) operand_mode].insn_code
9065 != CODE_FOR_nothing)
9066 || (ffs_optab->handlers[(int) operand_mode].insn_code
9067 != CODE_FOR_nothing)))
9068 ;
9069 else
9070 return 0;
9071 }
9072
9073 if (! get_subtarget (target)
9074 || GET_MODE (subtarget) != operand_mode)
9075 subtarget = 0;
9076
9077 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9078
9079 if (target == 0)
9080 target = gen_reg_rtx (mode);
9081
9082 result = emit_store_flag (target, code, op0, op1,
9083 operand_mode, unsignedp, 1);
9084
9085 if (result)
9086 {
9087 if (invert)
9088 result = expand_binop (mode, xor_optab, result, const1_rtx,
9089 result, 0, OPTAB_LIB_WIDEN);
9090 return result;
9091 }
9092
9093 /* If this failed, we have to do this with set/compare/jump/set code. */
9094 if (!REG_P (target)
9095 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9096 target = gen_reg_rtx (GET_MODE (target));
9097
9098 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9099 result = compare_from_rtx (op0, op1, code, unsignedp,
9100 operand_mode, NULL_RTX);
9101 if (GET_CODE (result) == CONST_INT)
9102 return (((result == const0_rtx && ! invert)
9103 || (result != const0_rtx && invert))
9104 ? const0_rtx : const1_rtx);
9105
9106 /* The code of RESULT may not match CODE if compare_from_rtx
9107 decided to swap its operands and reverse the original code.
9108
9109 We know that compare_from_rtx returns either a CONST_INT or
9110 a new comparison code, so it is safe to just extract the
9111 code from RESULT. */
9112 code = GET_CODE (result);
9113
9114 label = gen_label_rtx ();
9115 gcc_assert (bcc_gen_fctn[(int) code]);
9116
9117 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9118 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9119 emit_label (label);
9120
9121 return target;
9122 }
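
/* The single-bit-test rewrite described above, shown in plain C with
   hypothetical functions testing bit 3: the tested bit is shifted down to
   bit 0 and masked with 1, and an EQ test additionally xors the result
   with 1.  */
#if 0
static int
single_bit_ne_sketch (unsigned int x)	/* (x & 8) != 0 */
{
  return (x >> 3) & 1;
}

static int
single_bit_eq_sketch (unsigned int x)	/* (x & 8) == 0 */
{
  return ((x >> 3) & 1) ^ 1;
}
#endif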
9123 \f
9124
9125 /* Stubs in case we haven't got a casesi insn. */
9126 #ifndef HAVE_casesi
9127 # define HAVE_casesi 0
9128 # define gen_casesi(a, b, c, d, e) (0)
9129 # define CODE_FOR_casesi CODE_FOR_nothing
9130 #endif
9131
9132 /* If the machine does not have a case insn that compares the bounds,
9133 this means extra overhead for dispatch tables, which raises the
9134 threshold for using them. */
9135 #ifndef CASE_VALUES_THRESHOLD
9136 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9137 #endif /* CASE_VALUES_THRESHOLD */
9138
9139 unsigned int
9140 case_values_threshold (void)
9141 {
9142 return CASE_VALUES_THRESHOLD;
9143 }
9144
9145 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9146 0 otherwise (i.e. if there is no casesi instruction). */
9147 int
9148 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9149 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9150 {
9151 enum machine_mode index_mode = SImode;
9152 int index_bits = GET_MODE_BITSIZE (index_mode);
9153 rtx op1, op2, index;
9154 enum machine_mode op_mode;
9155
9156 if (! HAVE_casesi)
9157 return 0;
9158
9159 /* Convert the index to SImode. */
9160 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9161 {
9162 enum machine_mode omode = TYPE_MODE (index_type);
9163 rtx rangertx = expand_normal (range);
9164
9165 /* We must handle the endpoints in the original mode. */
9166 index_expr = build2 (MINUS_EXPR, index_type,
9167 index_expr, minval);
9168 minval = integer_zero_node;
9169 index = expand_normal (index_expr);
9170 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9171 omode, 1, default_label);
9172 /* Now we can safely truncate. */
9173 index = convert_to_mode (index_mode, index, 0);
9174 }
9175 else
9176 {
9177 if (TYPE_MODE (index_type) != index_mode)
9178 {
9179 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9180 index_expr = fold_convert (index_type, index_expr);
9181 }
9182
9183 index = expand_normal (index_expr);
9184 }
9185
9186 do_pending_stack_adjust ();
9187
9188 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9189 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9190 (index, op_mode))
9191 index = copy_to_mode_reg (op_mode, index);
9192
9193 op1 = expand_normal (minval);
9194
9195 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9196 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9197 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9198 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9199 (op1, op_mode))
9200 op1 = copy_to_mode_reg (op_mode, op1);
9201
9202 op2 = expand_normal (range);
9203
9204 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9205 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9206 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9207 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9208 (op2, op_mode))
9209 op2 = copy_to_mode_reg (op_mode, op2);
9210
9211 emit_jump_insn (gen_casesi (index, op1, op2,
9212 table_label, default_label));
9213 return 1;
9214 }
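
/* Why the wide-mode path above checks the range before truncating, as a
   plain C sketch with hypothetical types: an index outside the table must
   reach the default label while still in the original mode, otherwise
   truncation to SImode could alias it onto a valid table entry.  */
#if 0
static int
narrow_index_sketch (long long index, long long minval, long long range)
{
  unsigned long long rel = (unsigned long long) (index - minval);
  if (rel > (unsigned long long) range)
    return -1;			/* take the default label */
  return (int) rel;		/* now safe to truncate to SImode */
}
#endif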
9215
9216 /* Attempt to generate a tablejump instruction; same concept. */
9217 #ifndef HAVE_tablejump
9218 #define HAVE_tablejump 0
9219 #define gen_tablejump(x, y) (0)
9220 #endif
9221
9222 /* Subroutine of the next function.
9223
9224 INDEX is the value being switched on, with the lowest value
9225 in the table already subtracted.
9226 MODE is its expected mode (needed if INDEX is constant).
9227 RANGE is the length of the jump table.
9228 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9229
9230 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9231 index value is out of range. */
9232
9233 static void
9234 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9235 rtx default_label)
9236 {
9237 rtx temp, vector;
9238
9239 if (INTVAL (range) > cfun->max_jumptable_ents)
9240 cfun->max_jumptable_ents = INTVAL (range);
9241
9242 /* Do an unsigned comparison (in the proper mode) between the index
9243 expression and the value which represents the length of the range.
9244 Since we just finished subtracting the lower bound of the range
9245 from the index expression, this comparison allows us to simultaneously
9246 check that the original index expression value is both greater than
9247 or equal to the minimum value of the range and less than or equal to
9248 the maximum value of the range. */
9249
9250 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9251 default_label);
9252
9253 /* If index is in range, it must fit in Pmode.
9254 Convert to Pmode so we can index with it. */
9255 if (mode != Pmode)
9256 index = convert_to_mode (Pmode, index, 1);
9257
9258 /* Don't let a MEM slip through, because then INDEX that comes
9259 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9260 and break_out_memory_refs will go to work on it and mess it up. */
9261 #ifdef PIC_CASE_VECTOR_ADDRESS
9262 if (flag_pic && !REG_P (index))
9263 index = copy_to_mode_reg (Pmode, index);
9264 #endif
9265
9266 /* If flag_force_addr were to affect this address
9267 it could interfere with the tricky assumptions made
9268 about addresses that contain label-refs,
9269 which may be valid only very near the tablejump itself. */
9270 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9271 GET_MODE_SIZE, because this indicates how large insns are. The other
9272 uses should all be Pmode, because they are addresses. This code
9273 could fail if addresses and insns are not the same size. */
9274 index = gen_rtx_PLUS (Pmode,
9275 gen_rtx_MULT (Pmode, index,
9276 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9277 gen_rtx_LABEL_REF (Pmode, table_label));
9278 #ifdef PIC_CASE_VECTOR_ADDRESS
9279 if (flag_pic)
9280 index = PIC_CASE_VECTOR_ADDRESS (index);
9281 else
9282 #endif
9283 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9284 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9285 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9286 convert_move (temp, vector, 0);
9287
9288 emit_jump_insn (gen_tablejump (temp, table_label));
9289
9290 /* If we are generating PIC code or if the table is PC-relative, the
9291 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9292 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9293 emit_barrier ();
9294 }
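
/* The bounds check performed above, as a plain C sketch with hypothetical
   names: once the low bound has been subtracted, one unsigned comparison
   against the table length rejects indices below the minimum (they wrap
   around to huge values) as well as those above the maximum.  */
#if 0
static int
tablejump_dispatch_sketch (int x, int lo, int hi, int dflt, const int *table)
{
  unsigned int i = (unsigned int) (x - lo);
  if (i > (unsigned int) (hi - lo))
    return dflt;		/* out of range on either side */
  return table[i];		/* in range: index the dispatch table */
}
#endif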
9295
9296 int
9297 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9298 rtx table_label, rtx default_label)
9299 {
9300 rtx index;
9301
9302 if (! HAVE_tablejump)
9303 return 0;
9304
9305 index_expr = fold_build2 (MINUS_EXPR, index_type,
9306 fold_convert (index_type, index_expr),
9307 fold_convert (index_type, minval));
9308 index = expand_normal (index_expr);
9309 do_pending_stack_adjust ();
9310
9311 do_tablejump (index, TYPE_MODE (index_type),
9312 convert_modes (TYPE_MODE (index_type),
9313 TYPE_MODE (TREE_TYPE (range)),
9314 expand_normal (range),
9315 TYPE_UNSIGNED (TREE_TYPE (range))),
9316 table_label, default_label);
9317 return 1;
9318 }
9319
9320 /* Nonzero if the mode is a valid vector mode for this architecture.
9321 This returns nonzero even if there is no hardware support for the
9322 vector mode, provided we can emulate it with narrower modes. */
9323
9324 int
9325 vector_mode_valid_p (enum machine_mode mode)
9326 {
9327 enum mode_class class = GET_MODE_CLASS (mode);
9328 enum machine_mode innermode;
9329
9330 /* Only integer and floating-point vector modes make sense here. */
9331 if (class != MODE_VECTOR_INT
9332 && class != MODE_VECTOR_FLOAT)
9333 return 0;
9334
9335 /* The target supports this vector mode directly in hardware. */
9336 if (targetm.vector_mode_supported_p (mode))
9337 return 1;
9338
9339 innermode = GET_MODE_INNER (mode);
9340
9341 /* We should probably return 1 if requesting V4DI when we have no DI
9342 but do have V2DI, but that case is very unlikely. */
9343
9344 /* If we have support for the inner mode, we can safely emulate it.
9345 We may not have V2DI, but we can emulate with a pair of DIs. */
9346 return targetm.scalar_mode_supported_p (innermode);
9347 }
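
/* What "emulate with narrower modes" means above, as a plain C sketch: on a
   target with DImode support but no V2DI instructions, a V2DI addition can
   be open-coded as two DImode additions.  The struct below is hypothetical
   and merely stands in for the vector.  */
#if 0
typedef struct { long long e[2]; } v2di_sketch;

static v2di_sketch
v2di_add_sketch (v2di_sketch a, v2di_sketch b)
{
  v2di_sketch r;
  r.e[0] = a.e[0] + b.e[0];
  r.e[1] = a.e[1] + b.e[1];
  return r;
}
#endif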
9348
9349 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9350 static rtx
9351 const_vector_from_tree (tree exp)
9352 {
9353 rtvec v;
9354 int units, i;
9355 tree link, elt;
9356 enum machine_mode inner, mode;
9357
9358 mode = TYPE_MODE (TREE_TYPE (exp));
9359
9360 if (initializer_zerop (exp))
9361 return CONST0_RTX (mode);
9362
9363 units = GET_MODE_NUNITS (mode);
9364 inner = GET_MODE_INNER (mode);
9365
9366 v = rtvec_alloc (units);
9367
9368 link = TREE_VECTOR_CST_ELTS (exp);
9369 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9370 {
9371 elt = TREE_VALUE (link);
9372
9373 if (TREE_CODE (elt) == REAL_CST)
9374 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9375 inner);
9376 else
9377 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9378 TREE_INT_CST_HIGH (elt),
9379 inner);
9380 }
9381
9382 /* Initialize remaining elements to 0. */
9383 for (; i < units; ++i)
9384 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9385
9386 return gen_rtx_CONST_VECTOR (mode, v);
9387 }
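
/* A source-level example of the zero-padding rule above; the typedef uses
   GCC's vector_size extension and is for illustration only.  With a
   four-element vector type, a constant that lists only two elements ends
   up as { 1, 2, 0, 0 }.  */
#if 0
typedef int v4si_sketch __attribute__ ((vector_size (16)));
static v4si_sketch partial_init_sketch = { 1, 2 };	/* -> { 1, 2, 0, 0 } */
#endif
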
9388 #include "gt-expr.h"