1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56 #include "diagnostic.h"
57 #include "ssaexpand.h"
58
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
61
62 They should if the stack and args grow in opposite directions, but
63 only if we have push insns. */
64
65 #ifdef PUSH_ROUNDING
66
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
70 #endif
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
90 int cse_not_expected;
91
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces_d
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
108
109 /* This structure is used by store_by_pieces to describe the clear to
110 be performed. */
111
112 struct store_by_pieces_d
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121 void *constfundata;
122 int reverse;
123 };
124
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 unsigned int,
127 unsigned int);
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, alias_set_type, bool);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero. */
192 #ifndef SET_BY_PIECES_P
193 #define SET_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
196 #endif
197
198 /* This macro is used to determine whether store_by_pieces should be
199 called to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
204 #endif
205
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movmem_optab[NUM_MACHINE_MODES];
208
209 /* This array records the insn_code of insns to perform block sets. */
210 enum insn_code setmem_optab[NUM_MACHINE_MODES];
211
212 /* These arrays record the insn_code of three different kinds of insns
213 to perform block compares. */
214 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
216 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217
218 /* Synchronization primitives. */
219 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
237 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
246 \f
247 /* This is run to set up which modes can be used
248 directly in memory and to initialize the block move optab. It is run
249 at the beginning of compilation and when the target is reinitialized. */
250
251 void
252 init_expr_target (void)
253 {
254 rtx insn, pat;
255 enum machine_mode mode;
256 int num_clobbers;
257 rtx mem, mem1;
258 rtx reg;
259
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
269
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
273
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
276 {
277 int regno;
278
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
283
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
286
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290 regno++)
291 {
292 if (! HARD_REGNO_MODE_OK (regno, mode))
293 continue;
294
295 SET_REGNO (reg, regno);
296
297 SET_SRC (pat) = mem;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
301
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
306
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
311
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
316 }
317 }
318
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
323 {
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 {
328 enum insn_code ic;
329
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
332 continue;
333
334 PUT_MODE (mem, srcmode);
335
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
338 }
339 }
340 }
341
342 /* This is run at the start of compiling a function. */
343
344 void
345 init_expr (void)
346 {
347 memset (&crtl->expr, 0, sizeof (crtl->expr));
348 }
349 \f
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351 Both modes may be integer, or both may be floating, or both may be
352 fixed-point.
353 UNSIGNEDP should be nonzero if FROM is an unsigned type.
354 This causes zero-extension instead of sign-extension. */
355
356 void
357 convert_move (rtx to, rtx from, int unsignedp)
358 {
359 enum machine_mode to_mode = GET_MODE (to);
360 enum machine_mode from_mode = GET_MODE (from);
361 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
363 enum insn_code code;
364 rtx libcall;
365
366 /* rtx code for making an equivalent value. */
367 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369
370
371 gcc_assert (to_real == from_real);
372 gcc_assert (to_mode != BLKmode);
373 gcc_assert (from_mode != BLKmode);
374
375 /* If the source and destination are already the same, then there's
376 nothing to do. */
377 if (to == from)
378 return;
379
380 /* If FROM is a SUBREG that indicates that we have already done at least
381 the required extension, strip it. We don't handle such SUBREGs as
382 TO here. */
383
384 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386 >= GET_MODE_SIZE (to_mode))
387 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388 from = gen_lowpart (to_mode, from), from_mode = to_mode;
389
390 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391
392 if (to_mode == from_mode
393 || (from_mode == VOIDmode && CONSTANT_P (from)))
394 {
395 emit_move_insn (to, from);
396 return;
397 }
398
399 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400 {
401 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402
403 if (VECTOR_MODE_P (to_mode))
404 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
405 else
406 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407
408 emit_move_insn (to, from);
409 return;
410 }
411
412 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413 {
414 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
416 return;
417 }
418
419 if (to_real)
420 {
421 rtx value, insns;
422 convert_optab tab;
423
424 gcc_assert ((GET_MODE_PRECISION (from_mode)
425 != GET_MODE_PRECISION (to_mode))
426 || (DECIMAL_FLOAT_MODE_P (from_mode)
427 != DECIMAL_FLOAT_MODE_P (to_mode)));
428
429 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430 /* Conversion between decimal float and binary float, same size. */
431 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
433 tab = sext_optab;
434 else
435 tab = trunc_optab;
436
437 /* Try converting directly if the insn is supported. */
438
439 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440 if (code != CODE_FOR_nothing)
441 {
442 emit_unop_insn (code, to, from,
443 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
444 return;
445 }
446
447 /* Otherwise use a libcall. */
448 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449
450 /* Is this conversion implemented yet? */
451 gcc_assert (libcall);
452
453 start_sequence ();
454 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
455 1, from, from_mode);
456 insns = get_insns ();
457 end_sequence ();
458 emit_libcall_block (insns, to, value,
459 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
460 from)
461 : gen_rtx_FLOAT_EXTEND (to_mode, from));
462 return;
463 }
464
465 /* Handle pointer conversion. */ /* SPEE 900220. */
466 /* Targets are expected to provide conversion insns between PxImode and
467 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
468 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469 {
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472
473 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474 != CODE_FOR_nothing);
475
476 if (full_mode != from_mode)
477 from = convert_to_mode (full_mode, from, unsignedp);
478 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
479 to, from, UNKNOWN);
480 return;
481 }
482 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483 {
484 rtx new_from;
485 enum machine_mode full_mode
486 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487
488 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489 != CODE_FOR_nothing);
490
491 if (to_mode == full_mode)
492 {
493 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
494 to, from, UNKNOWN);
495 return;
496 }
497
498 new_from = gen_reg_rtx (full_mode);
499 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500 new_from, from, UNKNOWN);
501
502 /* else proceed to integer conversions below. */
503 from_mode = full_mode;
504 from = new_from;
505 }
506
507 /* Make sure both are fixed-point modes or both are not. */
508 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511 {
512 /* If we widen from_mode to to_mode and they are in the same class,
513 we won't saturate the result.
514 Otherwise, always saturate the result to play safe. */
515 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517 expand_fixed_convert (to, from, 0, 0);
518 else
519 expand_fixed_convert (to, from, 0, 1);
520 return;
521 }
522
523 /* Now both modes are integers. */
524
525 /* Handle expanding beyond a word. */
526 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528 {
529 rtx insns;
530 rtx lowpart;
531 rtx fill_value;
532 rtx lowfrom;
533 int i;
534 enum machine_mode lowpart_mode;
535 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539 != CODE_FOR_nothing)
540 {
541 /* If FROM is a SUBREG, put it into a register. Do this
542 so that we always generate the same set of insns for
543 better cse'ing; if an intermediate assignment occurred,
544 we won't be doing the operation directly on the SUBREG. */
545 if (optimize > 0 && GET_CODE (from) == SUBREG)
546 from = force_reg (from_mode, from);
547 emit_unop_insn (code, to, from, equiv_code);
548 return;
549 }
550 /* Next, try converting via full word. */
551 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553 != CODE_FOR_nothing))
554 {
555 rtx word_to = gen_reg_rtx (word_mode);
556 if (REG_P (to))
557 {
558 if (reg_overlap_mentioned_p (to, from))
559 from = force_reg (from_mode, from);
560 emit_clobber (to);
561 }
562 convert_move (word_to, from, unsignedp);
563 emit_unop_insn (code, to, word_to, equiv_code);
564 return;
565 }
566
567 /* No special multiword conversion insn; do it by hand. */
568 start_sequence ();
569
570 /* Since we will turn this into a no conflict block, we must ensure
571 that the source does not overlap the target. */
572
573 if (reg_overlap_mentioned_p (to, from))
574 from = force_reg (from_mode, from);
575
576 /* Get a copy of FROM widened to a word, if necessary. */
577 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578 lowpart_mode = word_mode;
579 else
580 lowpart_mode = from_mode;
581
582 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583
584 lowpart = gen_lowpart (lowpart_mode, to);
585 emit_move_insn (lowpart, lowfrom);
586
587 /* Compute the value to put in each remaining word. */
588 if (unsignedp)
589 fill_value = const0_rtx;
590 else
591 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
592 LT, lowfrom, const0_rtx,
593 VOIDmode, 0, -1);
594
595 /* Fill the remaining words. */
596 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
597 {
598 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599 rtx subword = operand_subword (to, index, 1, to_mode);
600
601 gcc_assert (subword);
602
603 if (fill_value != subword)
604 emit_move_insn (subword, fill_value);
605 }
606
607 insns = get_insns ();
608 end_sequence ();
609
610 emit_insn (insns);
611 return;
612 }
613
614 /* Truncating multi-word to a word or less. */
615 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
616 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
617 {
618 if (!((MEM_P (from)
619 && ! MEM_VOLATILE_P (from)
620 && direct_load[(int) to_mode]
621 && ! mode_dependent_address_p (XEXP (from, 0)))
622 || REG_P (from)
623 || GET_CODE (from) == SUBREG))
624 from = force_reg (from_mode, from);
625 convert_move (to, gen_lowpart (word_mode, from), 0);
626 return;
627 }
628
629 /* Now follow all the conversions between integers
630 no more than a word long. */
631
632 /* For truncation, usually we can just refer to FROM in a narrower mode. */
633 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
634 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
635 GET_MODE_BITSIZE (from_mode)))
636 {
637 if (!((MEM_P (from)
638 && ! MEM_VOLATILE_P (from)
639 && direct_load[(int) to_mode]
640 && ! mode_dependent_address_p (XEXP (from, 0)))
641 || REG_P (from)
642 || GET_CODE (from) == SUBREG))
643 from = force_reg (from_mode, from);
644 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
645 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
646 from = copy_to_reg (from);
647 emit_move_insn (to, gen_lowpart (to_mode, from));
648 return;
649 }
650
651 /* Handle extension. */
652 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
653 {
654 /* Convert directly if that works. */
655 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
656 != CODE_FOR_nothing)
657 {
658 emit_unop_insn (code, to, from, equiv_code);
659 return;
660 }
661 else
662 {
663 enum machine_mode intermediate;
664 rtx tmp;
665 tree shift_amount;
666
667 /* Search for a mode to convert via. */
668 for (intermediate = from_mode; intermediate != VOIDmode;
669 intermediate = GET_MODE_WIDER_MODE (intermediate))
670 if (((can_extend_p (to_mode, intermediate, unsignedp)
671 != CODE_FOR_nothing)
672 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
673 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
674 GET_MODE_BITSIZE (intermediate))))
675 && (can_extend_p (intermediate, from_mode, unsignedp)
676 != CODE_FOR_nothing))
677 {
678 convert_move (to, convert_to_mode (intermediate, from,
679 unsignedp), unsignedp);
680 return;
681 }
682
683 /* No suitable intermediate mode.
684 Generate what we need with shifts. */
685 shift_amount = build_int_cst (NULL_TREE,
686 GET_MODE_BITSIZE (to_mode)
687 - GET_MODE_BITSIZE (from_mode));
688 from = gen_lowpart (to_mode, force_reg (from_mode, from));
689 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
690 to, unsignedp);
691 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
692 to, unsignedp);
693 if (tmp != to)
694 emit_move_insn (to, tmp);
695 return;
696 }
697 }
698
699 /* Support special truncate insns for certain modes. */
700 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
701 {
702 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
703 to, from, UNKNOWN);
704 return;
705 }
706
707 /* Handle truncation of volatile memrefs, and so on;
708 the things that couldn't be truncated directly,
709 and for which there was no special instruction.
710
711 ??? Code above formerly short-circuited this, for most integer
712 mode pairs, with a force_reg in from_mode followed by a recursive
713 call to this routine. Appears always to have been wrong. */
714 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
715 {
716 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
717 emit_move_insn (to, temp);
718 return;
719 }
720
721 /* Mode combination is not recognized. */
722 gcc_unreachable ();
723 }
724
725 /* Return an rtx for a value that would result
726 from converting X to mode MODE.
727 Both X and MODE may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729 This can be done by referring to a part of X in place
730 or by copying to a new temporary with conversion. */
731
732 rtx
733 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
734 {
735 return convert_modes (mode, VOIDmode, x, unsignedp);
736 }
737
738 /* Return an rtx for a value that would result
739 from converting X from mode OLDMODE to mode MODE.
740 Both modes may be floating, or both integer.
741 UNSIGNEDP is nonzero if X is an unsigned value.
742
743 This can be done by referring to a part of X in place
744 or by copying to a new temporary with conversion.
745
746 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
747
748 rtx
749 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
750 {
751 rtx temp;
752
753 /* If FROM is a SUBREG that indicates that we have already done at least
754 the required extension, strip it. */
755
756 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
757 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
758 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
759 x = gen_lowpart (mode, x);
760
761 if (GET_MODE (x) != VOIDmode)
762 oldmode = GET_MODE (x);
763
764 if (mode == oldmode)
765 return x;
766
767 /* There is one case that we must handle specially: If we are converting
768 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769 we are to interpret the constant as unsigned, gen_lowpart will do
770 the wrong thing if the constant appears negative. What we want to do is
771 make the high-order word of the constant zero, not all ones. */
772
773 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
774 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
775 && CONST_INT_P (x) && INTVAL (x) < 0)
776 {
777 HOST_WIDE_INT val = INTVAL (x);
778
779 if (oldmode != VOIDmode
780 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
781 {
782 int width = GET_MODE_BITSIZE (oldmode);
783
784 /* We need to zero extend VAL. */
785 val &= ((HOST_WIDE_INT) 1 << width) - 1;
786 }
787
788 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
789 }
790
791 /* We can do this with a gen_lowpart if both desired and current modes
792 are integer, and this is either a constant integer, a register, or a
793 non-volatile MEM. Except for the constant case where MODE is no
794 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
795
796 if ((CONST_INT_P (x)
797 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
798 || (GET_MODE_CLASS (mode) == MODE_INT
799 && GET_MODE_CLASS (oldmode) == MODE_INT
800 && (GET_CODE (x) == CONST_DOUBLE
801 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
802 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
803 && direct_load[(int) mode])
804 || (REG_P (x)
805 && (! HARD_REGISTER_P (x)
806 || HARD_REGNO_MODE_OK (REGNO (x), mode))
807 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
808 GET_MODE_BITSIZE (GET_MODE (x)))))))))
809 {
810 /* ?? If we don't know OLDMODE, we have to assume here that
811 X does not need sign- or zero-extension. This may not be
812 the case, but it's the best we can do. */
813 if (CONST_INT_P (x) && oldmode != VOIDmode
814 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
815 {
816 HOST_WIDE_INT val = INTVAL (x);
817 int width = GET_MODE_BITSIZE (oldmode);
818
819 /* We must sign or zero-extend in this case. Start by
820 zero-extending, then sign extend if we need to. */
821 val &= ((HOST_WIDE_INT) 1 << width) - 1;
822 if (! unsignedp
823 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
824 val |= (HOST_WIDE_INT) (-1) << width;
825
826 return gen_int_mode (val, mode);
827 }
828
829 return gen_lowpart (mode, x);
830 }
831
832 /* Converting from an integer constant into mode is always equivalent to a
833 subreg operation. */
834 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
835 {
836 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
837 return simplify_gen_subreg (mode, x, oldmode, 0);
838 }
839
840 temp = gen_reg_rtx (mode);
841 convert_move (temp, x, unsignedp);
842 return temp;
843 }
844 \f
845 /* STORE_MAX_PIECES is the number of bytes at a time that we can
846 store efficiently. Due to internal GCC limitations, this is
847 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
848 for an immediate constant. */
849
850 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
851
852 /* Determine whether the LEN bytes can be moved by using several move
853 instructions. Return nonzero if a call to move_by_pieces should
854 succeed. */
855
856 int
857 can_move_by_pieces (unsigned HOST_WIDE_INT len,
858 unsigned int align ATTRIBUTE_UNUSED)
859 {
860 return MOVE_BY_PIECES_P (len, align);
861 }
862
863 /* Generate several move instructions to copy LEN bytes from block FROM to
864 block TO. (These are MEM rtx's with BLKmode).
865
866 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
867 used to push FROM to the stack.
868
869 ALIGN is maximum stack alignment we can assume.
870
871 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
872 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
873 stpcpy. */
874
875 rtx
876 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
877 unsigned int align, int endp)
878 {
879 struct move_by_pieces_d data;
880 enum machine_mode to_addr_mode, from_addr_mode
881 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
882 rtx to_addr, from_addr = XEXP (from, 0);
883 unsigned int max_size = MOVE_MAX_PIECES + 1;
884 enum machine_mode mode = VOIDmode, tmode;
885 enum insn_code icode;
886
887 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888
889 data.offset = 0;
890 data.from_addr = from_addr;
891 if (to)
892 {
893 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
894 to_addr = XEXP (to, 0);
895 data.to = to;
896 data.autinc_to
897 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899 data.reverse
900 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901 }
902 else
903 {
904 to_addr_mode = VOIDmode;
905 to_addr = NULL_RTX;
906 data.to = NULL_RTX;
907 data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909 data.reverse = 1;
910 #else
911 data.reverse = 0;
912 #endif
913 }
914 data.to_addr = to_addr;
915 data.from = from;
916 data.autinc_from
917 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918 || GET_CODE (from_addr) == POST_INC
919 || GET_CODE (from_addr) == POST_DEC);
920
921 data.explicit_inc_from = 0;
922 data.explicit_inc_to = 0;
923 if (data.reverse) data.offset = len;
924 data.len = len;
925
926 /* If copying requires more than two move insns,
927 copy addresses to registers (to make displacements shorter)
928 and use post-increment if available. */
929 if (!(data.autinc_from && data.autinc_to)
930 && move_by_pieces_ninsns (len, align, max_size) > 2)
931 {
932 /* Find the mode of the largest move... */
933 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935 if (GET_MODE_SIZE (tmode) < max_size)
936 mode = tmode;
937
938 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode,
941 plus_constant (from_addr, len));
942 data.autinc_from = 1;
943 data.explicit_inc_from = -1;
944 }
945 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
946 {
947 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
948 data.autinc_from = 1;
949 data.explicit_inc_from = 1;
950 }
951 if (!data.autinc_from && CONSTANT_P (from_addr))
952 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
953 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
954 {
955 data.to_addr = copy_to_mode_reg (to_addr_mode,
956 plus_constant (to_addr, len));
957 data.autinc_to = 1;
958 data.explicit_inc_to = -1;
959 }
960 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
961 {
962 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 data.autinc_to = 1;
964 data.explicit_inc_to = 1;
965 }
966 if (!data.autinc_to && CONSTANT_P (to_addr))
967 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
968 }
969
970 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
971 if (align >= GET_MODE_ALIGNMENT (tmode))
972 align = GET_MODE_ALIGNMENT (tmode);
973 else
974 {
975 enum machine_mode xmode;
976
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
978 tmode != VOIDmode;
979 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
980 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
981 || SLOW_UNALIGNED_ACCESS (tmode, align))
982 break;
983
984 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
985 }
986
987 /* First move what we can in the largest integer mode, then go to
988 successively smaller modes. */
989
990 while (max_size > 1)
991 {
992 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
993 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
994 if (GET_MODE_SIZE (tmode) < max_size)
995 mode = tmode;
996
997 if (mode == VOIDmode)
998 break;
999
1000 icode = optab_handler (mov_optab, mode)->insn_code;
1001 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1002 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1003
1004 max_size = GET_MODE_SIZE (mode);
1005 }
1006
1007 /* The code above should have handled everything. */
1008 gcc_assert (!data.len);
1009
1010 if (endp)
1011 {
1012 rtx to1;
1013
1014 gcc_assert (!data.reverse);
1015 if (data.autinc_to)
1016 {
1017 if (endp == 2)
1018 {
1019 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1020 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1021 else
1022 data.to_addr = copy_to_mode_reg (to_addr_mode,
1023 plus_constant (data.to_addr,
1024 -1));
1025 }
1026 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1027 data.offset);
1028 }
1029 else
1030 {
1031 if (endp == 2)
1032 --data.offset;
1033 to1 = adjust_address (data.to, QImode, data.offset);
1034 }
1035 return to1;
1036 }
1037 else
1038 return data.to;
1039 }
1040
1041 /* Return number of insns required to move L bytes by pieces.
1042 ALIGN (in bits) is maximum alignment we can assume. */
1043
1044 static unsigned HOST_WIDE_INT
1045 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1046 unsigned int max_size)
1047 {
1048 unsigned HOST_WIDE_INT n_insns = 0;
1049 enum machine_mode tmode;
1050
1051 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1052 if (align >= GET_MODE_ALIGNMENT (tmode))
1053 align = GET_MODE_ALIGNMENT (tmode);
1054 else
1055 {
1056 enum machine_mode tmode, xmode;
1057
1058 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1059 tmode != VOIDmode;
1060 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1062 || SLOW_UNALIGNED_ACCESS (tmode, align))
1063 break;
1064
1065 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1066 }
1067
1068 while (max_size > 1)
1069 {
1070 enum machine_mode mode = VOIDmode;
1071 enum insn_code icode;
1072
1073 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1074 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1075 if (GET_MODE_SIZE (tmode) < max_size)
1076 mode = tmode;
1077
1078 if (mode == VOIDmode)
1079 break;
1080
1081 icode = optab_handler (mov_optab, mode)->insn_code;
1082 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1083 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1084
1085 max_size = GET_MODE_SIZE (mode);
1086 }
1087
1088 gcc_assert (!l);
1089 return n_insns;
1090 }
1091
1092 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1093 with move instructions for mode MODE. GENFUN is the gen_... function
1094 to make a move insn for that mode. DATA has all the other info. */
1095
1096 static void
1097 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1098 struct move_by_pieces_d *data)
1099 {
1100 unsigned int size = GET_MODE_SIZE (mode);
1101 rtx to1 = NULL_RTX, from1;
1102
1103 while (data->len >= size)
1104 {
1105 if (data->reverse)
1106 data->offset -= size;
1107
1108 if (data->to)
1109 {
1110 if (data->autinc_to)
1111 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1112 data->offset);
1113 else
1114 to1 = adjust_address (data->to, mode, data->offset);
1115 }
1116
1117 if (data->autinc_from)
1118 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1119 data->offset);
1120 else
1121 from1 = adjust_address (data->from, mode, data->offset);
1122
1123 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1124 emit_insn (gen_add2_insn (data->to_addr,
1125 GEN_INT (-(HOST_WIDE_INT)size)));
1126 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1127 emit_insn (gen_add2_insn (data->from_addr,
1128 GEN_INT (-(HOST_WIDE_INT)size)));
1129
1130 if (data->to)
1131 emit_insn ((*genfun) (to1, from1));
1132 else
1133 {
1134 #ifdef PUSH_ROUNDING
1135 emit_single_push_insn (mode, from1, NULL);
1136 #else
1137 gcc_unreachable ();
1138 #endif
1139 }
1140
1141 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1142 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1143 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1144 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1145
1146 if (! data->reverse)
1147 data->offset += size;
1148
1149 data->len -= size;
1150 }
1151 }
1152 \f
1153 /* Emit code to move a block Y to a block X. This may be done with
1154 string-move instructions, with multiple scalar move instructions,
1155 or with a library call.
1156
1157 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1158 SIZE is an rtx that says how long they are.
1159 ALIGN is the maximum alignment we can assume they have.
1160 METHOD describes what kind of copy this is, and what mechanisms may be used.
1161
1162 Return the address of the new block, if memcpy is called and returns it,
1163 0 otherwise. */
1164
1165 rtx
1166 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1167 unsigned int expected_align, HOST_WIDE_INT expected_size)
1168 {
1169 bool may_use_call;
1170 rtx retval = 0;
1171 unsigned int align;
1172
1173 switch (method)
1174 {
1175 case BLOCK_OP_NORMAL:
1176 case BLOCK_OP_TAILCALL:
1177 may_use_call = true;
1178 break;
1179
1180 case BLOCK_OP_CALL_PARM:
1181 may_use_call = block_move_libcall_safe_for_call_parm ();
1182
1183 /* Make inhibit_defer_pop nonzero around the library call
1184 to force it to pop the arguments right away. */
1185 NO_DEFER_POP;
1186 break;
1187
1188 case BLOCK_OP_NO_LIBCALL:
1189 may_use_call = false;
1190 break;
1191
1192 default:
1193 gcc_unreachable ();
1194 }
1195
1196 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1197
1198 gcc_assert (MEM_P (x));
1199 gcc_assert (MEM_P (y));
1200 gcc_assert (size);
1201
1202 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1203 block copy is more efficient for other large modes, e.g. DCmode. */
1204 x = adjust_address (x, BLKmode, 0);
1205 y = adjust_address (y, BLKmode, 0);
1206
1207 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1208 can be incorrect is coming from __builtin_memcpy. */
1209 if (CONST_INT_P (size))
1210 {
1211 if (INTVAL (size) == 0)
1212 return 0;
1213
1214 x = shallow_copy_rtx (x);
1215 y = shallow_copy_rtx (y);
1216 set_mem_size (x, size);
1217 set_mem_size (y, size);
1218 }
1219
1220 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1221 move_by_pieces (x, y, INTVAL (size), align, 0);
1222 else if (emit_block_move_via_movmem (x, y, size, align,
1223 expected_align, expected_size))
1224 ;
1225 else if (may_use_call
1226 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1227 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1228 retval = emit_block_move_via_libcall (x, y, size,
1229 method == BLOCK_OP_TAILCALL);
1230 else
1231 emit_block_move_via_loop (x, y, size, align);
1232
1233 if (method == BLOCK_OP_CALL_PARM)
1234 OK_DEFER_POP;
1235
1236 return retval;
1237 }
1238
1239 rtx
1240 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1241 {
1242 return emit_block_move_hints (x, y, size, method, 0, -1);
1243 }
1244
1245 /* A subroutine of emit_block_move. Returns true if calling the
1246 block move libcall will not clobber any parameters which may have
1247 already been placed on the stack. */
1248
1249 static bool
1250 block_move_libcall_safe_for_call_parm (void)
1251 {
1252 #if defined (REG_PARM_STACK_SPACE)
1253 tree fn;
1254 #endif
1255
1256 /* If arguments are pushed on the stack, then they're safe. */
1257 if (PUSH_ARGS)
1258 return true;
1259
1260 /* If registers go on the stack anyway, any argument is sure to clobber
1261 an outgoing argument. */
1262 #if defined (REG_PARM_STACK_SPACE)
1263 fn = emit_block_move_libcall_fn (false);
1264 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1265 && REG_PARM_STACK_SPACE (fn) != 0)
1266 return false;
1267 #endif
1268
1269 /* If any argument goes in memory, then it might clobber an outgoing
1270 argument. */
1271 {
1272 CUMULATIVE_ARGS args_so_far;
1273 tree fn, arg;
1274
1275 fn = emit_block_move_libcall_fn (false);
1276 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277
1278 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1279 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1280 {
1281 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1282 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1283 if (!tmp || !REG_P (tmp))
1284 return false;
1285 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1286 return false;
1287 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1288 }
1289 }
1290 return true;
1291 }
1292
1293 /* A subroutine of emit_block_move. Expand a movmem pattern;
1294 return true if successful. */
1295
1296 static bool
1297 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1298 unsigned int expected_align, HOST_WIDE_INT expected_size)
1299 {
1300 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1301 int save_volatile_ok = volatile_ok;
1302 enum machine_mode mode;
1303
1304 if (expected_align < align)
1305 expected_align = align;
1306
1307 /* Since this is a move insn, we don't care about volatility. */
1308 volatile_ok = 1;
1309
1310 /* Try the most limited insn first, because there's no point
1311 including more than one in the machine description unless
1312 the more limited one has some advantage. */
1313
1314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1315 mode = GET_MODE_WIDER_MODE (mode))
1316 {
1317 enum insn_code code = movmem_optab[(int) mode];
1318 insn_operand_predicate_fn pred;
1319
1320 if (code != CODE_FOR_nothing
1321 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1322 here because if SIZE is less than the mode mask, as it is
1323 returned by the macro, it will definitely be less than the
1324 actual mode mask. */
1325 && ((CONST_INT_P (size)
1326 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1327 <= (GET_MODE_MASK (mode) >> 1)))
1328 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1329 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1330 || (*pred) (x, BLKmode))
1331 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1332 || (*pred) (y, BLKmode))
1333 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1334 || (*pred) (opalign, VOIDmode)))
1335 {
1336 rtx op2;
1337 rtx last = get_last_insn ();
1338 rtx pat;
1339
1340 op2 = convert_to_mode (mode, size, 1);
1341 pred = insn_data[(int) code].operand[2].predicate;
1342 if (pred != 0 && ! (*pred) (op2, mode))
1343 op2 = copy_to_mode_reg (mode, op2);
1344
1345 /* ??? When called via emit_block_move_for_call, it'd be
1346 nice if there were some way to inform the backend, so
1347 that it doesn't fail the expansion because it thinks
1348 emitting the libcall would be more efficient. */
1349
1350 if (insn_data[(int) code].n_operands == 4)
1351 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1352 else
1353 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1354 GEN_INT (expected_align
1355 / BITS_PER_UNIT),
1356 GEN_INT (expected_size));
1357 if (pat)
1358 {
1359 emit_insn (pat);
1360 volatile_ok = save_volatile_ok;
1361 return true;
1362 }
1363 else
1364 delete_insns_since (last);
1365 }
1366 }
1367
1368 volatile_ok = save_volatile_ok;
1369 return false;
1370 }
1371
1372 /* A subroutine of emit_block_move. Expand a call to memcpy.
1373 Return the return value from memcpy, 0 otherwise. */
1374
1375 rtx
1376 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1377 {
1378 rtx dst_addr, src_addr;
1379 tree call_expr, fn, src_tree, dst_tree, size_tree;
1380 enum machine_mode size_mode;
1381 rtx retval;
1382
1383 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1384 pseudos. We can then place those new pseudos into a VAR_DECL and
1385 use them later. */
1386
1387 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1388 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1389
1390 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1391 src_addr = convert_memory_address (ptr_mode, src_addr);
1392
1393 dst_tree = make_tree (ptr_type_node, dst_addr);
1394 src_tree = make_tree (ptr_type_node, src_addr);
1395
1396 size_mode = TYPE_MODE (sizetype);
1397
1398 size = convert_to_mode (size_mode, size, 1);
1399 size = copy_to_mode_reg (size_mode, size);
1400
1401 /* It is incorrect to use the libcall calling conventions to call
1402 memcpy in this context. This could be a user call to memcpy and
1403 the user may wish to examine the return value from memcpy. For
1404 targets where libcalls and normal calls have different conventions
1405 for returning pointers, we could end up generating incorrect code. */
1406
1407 size_tree = make_tree (sizetype, size);
1408
1409 fn = emit_block_move_libcall_fn (true);
1410 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1411 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1412
1413 retval = expand_normal (call_expr);
1414
1415 return retval;
1416 }
1417
1418 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1419 for the function we use for block copies. The first time FOR_CALL
1420 is true, we call assemble_external. */
1421
1422 static GTY(()) tree block_move_fn;
1423
1424 void
1425 init_block_move_fn (const char *asmspec)
1426 {
1427 if (!block_move_fn)
1428 {
1429 tree args, fn;
1430
1431 fn = get_identifier ("memcpy");
1432 args = build_function_type_list (ptr_type_node, ptr_type_node,
1433 const_ptr_type_node, sizetype,
1434 NULL_TREE);
1435
1436 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1437 DECL_EXTERNAL (fn) = 1;
1438 TREE_PUBLIC (fn) = 1;
1439 DECL_ARTIFICIAL (fn) = 1;
1440 TREE_NOTHROW (fn) = 1;
1441 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1442 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1443
1444 block_move_fn = fn;
1445 }
1446
1447 if (asmspec)
1448 set_user_assembler_name (block_move_fn, asmspec);
1449 }
1450
1451 static tree
1452 emit_block_move_libcall_fn (int for_call)
1453 {
1454 static bool emitted_extern;
1455
1456 if (!block_move_fn)
1457 init_block_move_fn (NULL);
1458
1459 if (for_call && !emitted_extern)
1460 {
1461 emitted_extern = true;
1462 make_decl_rtl (block_move_fn);
1463 assemble_external (block_move_fn);
1464 }
1465
1466 return block_move_fn;
1467 }
1468
1469 /* A subroutine of emit_block_move. Copy the data via an explicit
1470 loop. This is used only when libcalls are forbidden. */
1471 /* ??? It'd be nice to copy in hunks larger than QImode. */
1472
1473 static void
1474 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1475 unsigned int align ATTRIBUTE_UNUSED)
1476 {
1477 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1478 enum machine_mode x_addr_mode
1479 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1480 enum machine_mode y_addr_mode
1481 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1482 enum machine_mode iter_mode;
1483
1484 iter_mode = GET_MODE (size);
1485 if (iter_mode == VOIDmode)
1486 iter_mode = word_mode;
1487
1488 top_label = gen_label_rtx ();
1489 cmp_label = gen_label_rtx ();
1490 iter = gen_reg_rtx (iter_mode);
1491
1492 emit_move_insn (iter, const0_rtx);
1493
1494 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1495 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1496 do_pending_stack_adjust ();
1497
1498 emit_jump (cmp_label);
1499 emit_label (top_label);
1500
1501 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1502 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1503
1504 if (x_addr_mode != y_addr_mode)
1505 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1506 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1507
1508 x = change_address (x, QImode, x_addr);
1509 y = change_address (y, QImode, y_addr);
1510
1511 emit_move_insn (x, y);
1512
1513 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1514 true, OPTAB_LIB_WIDEN);
1515 if (tmp != iter)
1516 emit_move_insn (iter, tmp);
1517
1518 emit_label (cmp_label);
1519
1520 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1521 true, top_label);
1522 }
1523 \f
1524 /* Copy all or part of a value X into registers starting at REGNO.
1525 The number of registers to be filled is NREGS. */
1526
1527 void
1528 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1529 {
1530 int i;
1531 #ifdef HAVE_load_multiple
1532 rtx pat;
1533 rtx last;
1534 #endif
1535
1536 if (nregs == 0)
1537 return;
1538
1539 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1540 x = validize_mem (force_const_mem (mode, x));
1541
1542 /* See if the machine can do this with a load multiple insn. */
1543 #ifdef HAVE_load_multiple
1544 if (HAVE_load_multiple)
1545 {
1546 last = get_last_insn ();
1547 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1548 GEN_INT (nregs));
1549 if (pat)
1550 {
1551 emit_insn (pat);
1552 return;
1553 }
1554 else
1555 delete_insns_since (last);
1556 }
1557 #endif
1558
1559 for (i = 0; i < nregs; i++)
1560 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1561 operand_subword_force (x, i, mode));
1562 }
1563
1564 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1565 The number of registers to be filled is NREGS. */
1566
1567 void
1568 move_block_from_reg (int regno, rtx x, int nregs)
1569 {
1570 int i;
1571
1572 if (nregs == 0)
1573 return;
1574
1575 /* See if the machine can do this with a store multiple insn. */
1576 #ifdef HAVE_store_multiple
1577 if (HAVE_store_multiple)
1578 {
1579 rtx last = get_last_insn ();
1580 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1581 GEN_INT (nregs));
1582 if (pat)
1583 {
1584 emit_insn (pat);
1585 return;
1586 }
1587 else
1588 delete_insns_since (last);
1589 }
1590 #endif
1591
1592 for (i = 0; i < nregs; i++)
1593 {
1594 rtx tem = operand_subword (x, i, 1, BLKmode);
1595
1596 gcc_assert (tem);
1597
1598 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1599 }
1600 }
1601
1602 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1603 ORIG, where ORIG is a non-consecutive group of registers represented by
1604 a PARALLEL. The clone is identical to the original except in that the
1605 original set of registers is replaced by a new set of pseudo registers.
1606 The new set has the same modes as the original set. */
1607
1608 rtx
1609 gen_group_rtx (rtx orig)
1610 {
1611 int i, length;
1612 rtx *tmps;
1613
1614 gcc_assert (GET_CODE (orig) == PARALLEL);
1615
1616 length = XVECLEN (orig, 0);
1617 tmps = XALLOCAVEC (rtx, length);
1618
1619 /* Skip a NULL entry in first slot. */
1620 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1621
1622 if (i)
1623 tmps[0] = 0;
1624
1625 for (; i < length; i++)
1626 {
1627 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1628 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1629
1630 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1631 }
1632
1633 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1634 }
1635
1636 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1637 except that values are placed in TMPS[i], and must later be moved
1638 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1639
1640 static void
1641 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1642 {
1643 rtx src;
1644 int start, i;
1645 enum machine_mode m = GET_MODE (orig_src);
1646
1647 gcc_assert (GET_CODE (dst) == PARALLEL);
1648
1649 if (m != VOIDmode
1650 && !SCALAR_INT_MODE_P (m)
1651 && !MEM_P (orig_src)
1652 && GET_CODE (orig_src) != CONCAT)
1653 {
1654 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1655 if (imode == BLKmode)
1656 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1657 else
1658 src = gen_reg_rtx (imode);
1659 if (imode != BLKmode)
1660 src = gen_lowpart (GET_MODE (orig_src), src);
1661 emit_move_insn (src, orig_src);
1662 /* ...and back again. */
1663 if (imode != BLKmode)
1664 src = gen_lowpart (imode, src);
1665 emit_group_load_1 (tmps, dst, src, type, ssize);
1666 return;
1667 }
1668
1669 /* Check for a NULL entry, used to indicate that the parameter goes
1670 both on the stack and in registers. */
1671 if (XEXP (XVECEXP (dst, 0, 0), 0))
1672 start = 0;
1673 else
1674 start = 1;
1675
1676 /* Process the pieces. */
1677 for (i = start; i < XVECLEN (dst, 0); i++)
1678 {
1679 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1680 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1681 unsigned int bytelen = GET_MODE_SIZE (mode);
1682 int shift = 0;
1683
1684 /* Handle trailing fragments that run over the size of the struct. */
1685 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1686 {
1687 /* Arrange to shift the fragment to where it belongs.
1688 extract_bit_field loads to the lsb of the reg. */
1689 if (
1690 #ifdef BLOCK_REG_PADDING
1691 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1692 == (BYTES_BIG_ENDIAN ? upward : downward)
1693 #else
1694 BYTES_BIG_ENDIAN
1695 #endif
1696 )
1697 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1698 bytelen = ssize - bytepos;
1699 gcc_assert (bytelen > 0);
1700 }
1701
1702 /* If we won't be loading directly from memory, protect the real source
1703 from strange tricks we might play; but make sure that the source can
1704 be loaded directly into the destination. */
1705 src = orig_src;
1706 if (!MEM_P (orig_src)
1707 && (!CONSTANT_P (orig_src)
1708 || (GET_MODE (orig_src) != mode
1709 && GET_MODE (orig_src) != VOIDmode)))
1710 {
1711 if (GET_MODE (orig_src) == VOIDmode)
1712 src = gen_reg_rtx (mode);
1713 else
1714 src = gen_reg_rtx (GET_MODE (orig_src));
1715
1716 emit_move_insn (src, orig_src);
1717 }
1718
1719 /* Optimize the access just a bit. */
1720 if (MEM_P (src)
1721 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1722 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1723 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1724 && bytelen == GET_MODE_SIZE (mode))
1725 {
1726 tmps[i] = gen_reg_rtx (mode);
1727 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1728 }
1729 else if (COMPLEX_MODE_P (mode)
1730 && GET_MODE (src) == mode
1731 && bytelen == GET_MODE_SIZE (mode))
1732 /* Let emit_move_complex do the bulk of the work. */
1733 tmps[i] = src;
1734 else if (GET_CODE (src) == CONCAT)
1735 {
1736 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1737 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1738
1739 if ((bytepos == 0 && bytelen == slen0)
1740 || (bytepos != 0 && bytepos + bytelen <= slen))
1741 {
1742 /* The following assumes that the concatenated objects all
1743 have the same size. In this case, a simple calculation
1744 can be used to determine the object and the bit field
1745 to be extracted. */
1746 tmps[i] = XEXP (src, bytepos / slen0);
1747 if (! CONSTANT_P (tmps[i])
1748 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1749 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1750 (bytepos % slen0) * BITS_PER_UNIT,
1751 1, NULL_RTX, mode, mode);
1752 }
1753 else
1754 {
1755 rtx mem;
1756
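	      /* The piece starts at offset zero but is not exactly the
		 first element of the CONCAT, which is the only zero-offset
		 case the simple path above handles, so spill the whole
		 CONCAT to memory and extract the bits from there.  */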
1757 gcc_assert (!bytepos);
1758 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1759 emit_move_insn (mem, src);
1760 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1761 0, 1, NULL_RTX, mode, mode);
1762 }
1763 }
1764 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1765 SIMD register, which is currently broken. While we get GCC
1766 to emit proper RTL for these cases, let's dump to memory. */
1767 else if (VECTOR_MODE_P (GET_MODE (dst))
1768 && REG_P (src))
1769 {
1770 int slen = GET_MODE_SIZE (GET_MODE (src));
1771 rtx mem;
1772
1773 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1774 emit_move_insn (mem, src);
1775 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1776 }
1777 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1778 && XVECLEN (dst, 0) > 1)
1779 	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1780 else if (CONSTANT_P (src))
1781 {
1782 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1783
1784 if (len == ssize)
1785 tmps[i] = src;
1786 else
1787 {
1788 rtx first, second;
1789
1790 gcc_assert (2 * len == ssize);
1791 split_double (src, &first, &second);
1792 if (i)
1793 tmps[i] = second;
1794 else
1795 tmps[i] = first;
1796 }
1797 }
1798 else if (REG_P (src) && GET_MODE (src) == mode)
1799 tmps[i] = src;
1800 else
1801 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1802 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1803 mode, mode);
1804
1805 if (shift)
1806 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1807 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1808 }
1809 }
1810
1811 /* Emit code to move a block SRC of type TYPE to a block DST,
1812 where DST is non-consecutive registers represented by a PARALLEL.
1813 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1814 if not known. */
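
/* For illustration only (the register numbers below are arbitrary and not
   taken from any particular ABI), DST might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. a 16-byte block whose first eight bytes go into register 3 and
   whose second eight bytes go into register 4.  */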
1815
1816 void
1817 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1818 {
1819 rtx *tmps;
1820 int i;
1821
1822 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1823 emit_group_load_1 (tmps, dst, src, type, ssize);
1824
1825 /* Copy the extracted pieces into the proper (probable) hard regs. */
1826 for (i = 0; i < XVECLEN (dst, 0); i++)
1827 {
1828 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1829 if (d == NULL)
1830 continue;
1831 emit_move_insn (d, tmps[i]);
1832 }
1833 }
1834
1835 /* Similar, but load SRC into new pseudos in a format that looks like
1836 PARALLEL. This can later be fed to emit_group_move to get things
1837 in the right place. */
1838
1839 rtx
1840 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1841 {
1842 rtvec vec;
1843 int i;
1844
1845 vec = rtvec_alloc (XVECLEN (parallel, 0));
1846 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1847
1848 /* Convert the vector to look just like the original PARALLEL, except
1849 with the computed values. */
1850 for (i = 0; i < XVECLEN (parallel, 0); i++)
1851 {
1852 rtx e = XVECEXP (parallel, 0, i);
1853 rtx d = XEXP (e, 0);
1854
1855 if (d)
1856 {
1857 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1858 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1859 }
1860 RTVEC_ELT (vec, i) = e;
1861 }
1862
1863 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1864 }
1865
1866 /* Emit code to move a block SRC to block DST, where SRC and DST are
1867 non-consecutive groups of registers, each represented by a PARALLEL. */
1868
1869 void
1870 emit_group_move (rtx dst, rtx src)
1871 {
1872 int i;
1873
1874 gcc_assert (GET_CODE (src) == PARALLEL
1875 && GET_CODE (dst) == PARALLEL
1876 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1877
1878 /* Skip first entry if NULL. */
1879 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1880 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1881 XEXP (XVECEXP (src, 0, i), 0));
1882 }
1883
1884 /* Move a group of registers represented by a PARALLEL into pseudos. */
1885
1886 rtx
1887 emit_group_move_into_temps (rtx src)
1888 {
1889 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1890 int i;
1891
1892 for (i = 0; i < XVECLEN (src, 0); i++)
1893 {
1894 rtx e = XVECEXP (src, 0, i);
1895 rtx d = XEXP (e, 0);
1896
1897 if (d)
1898 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1899 RTVEC_ELT (vec, i) = e;
1900 }
1901
1902 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1903 }
1904
1905 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1906 where SRC is non-consecutive registers represented by a PARALLEL.
1907 SSIZE represents the total size of block ORIG_DST, or -1 if not
1908 known. */
1909
1910 void
1911 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1912 {
1913 rtx *tmps, dst;
1914 int start, finish, i;
1915 enum machine_mode m = GET_MODE (orig_dst);
1916
1917 gcc_assert (GET_CODE (src) == PARALLEL);
1918
1919 if (!SCALAR_INT_MODE_P (m)
1920 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1921 {
1922 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1923 if (imode == BLKmode)
1924 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1925 else
1926 dst = gen_reg_rtx (imode);
1927 emit_group_store (dst, src, type, ssize);
1928 if (imode != BLKmode)
1929 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1930 emit_move_insn (orig_dst, dst);
1931 return;
1932 }
1933
1934 /* Check for a NULL entry, used to indicate that the parameter goes
1935 both on the stack and in registers. */
1936 if (XEXP (XVECEXP (src, 0, 0), 0))
1937 start = 0;
1938 else
1939 start = 1;
1940 finish = XVECLEN (src, 0);
1941
1942 tmps = XALLOCAVEC (rtx, finish);
1943
1944 /* Copy the (probable) hard regs into pseudos. */
1945 for (i = start; i < finish; i++)
1946 {
1947 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1948 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1949 {
1950 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1951 emit_move_insn (tmps[i], reg);
1952 }
1953 else
1954 tmps[i] = reg;
1955 }
1956
1957 /* If we won't be storing directly into memory, protect the real destination
1958 from strange tricks we might play. */
1959 dst = orig_dst;
1960 if (GET_CODE (dst) == PARALLEL)
1961 {
1962 rtx temp;
1963
1964 /* We can get a PARALLEL dst if there is a conditional expression in
1965 a return statement. In that case, the dst and src are the same,
1966 so no action is necessary. */
1967 if (rtx_equal_p (dst, src))
1968 return;
1969
1970 /* It is unclear if we can ever reach here, but we may as well handle
1971 it. Allocate a temporary, and split this into a store/load to/from
1972 the temporary. */
1973
1974 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1975 emit_group_store (temp, src, type, ssize);
1976 emit_group_load (dst, temp, type, ssize);
1977 return;
1978 }
1979 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1980 {
1981 enum machine_mode outer = GET_MODE (dst);
1982 enum machine_mode inner;
1983 HOST_WIDE_INT bytepos;
1984 bool done = false;
1985 rtx temp;
1986
1987 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1988 dst = gen_reg_rtx (outer);
1989
1990 /* Make life a bit easier for combine. */
1991 /* If the first element of the vector is the low part
1992 of the destination mode, use a paradoxical subreg to
1993 initialize the destination. */
1994 if (start < finish)
1995 {
1996 inner = GET_MODE (tmps[start]);
1997 bytepos = subreg_lowpart_offset (inner, outer);
1998 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1999 {
2000 temp = simplify_gen_subreg (outer, tmps[start],
2001 inner, 0);
2002 if (temp)
2003 {
2004 emit_move_insn (dst, temp);
2005 done = true;
2006 start++;
2007 }
2008 }
2009 }
2010
2011 /* If the first element wasn't the low part, try the last. */
2012 if (!done
2013 && start < finish - 1)
2014 {
2015 inner = GET_MODE (tmps[finish - 1]);
2016 bytepos = subreg_lowpart_offset (inner, outer);
2017 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2018 {
2019 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2020 inner, 0);
2021 if (temp)
2022 {
2023 emit_move_insn (dst, temp);
2024 done = true;
2025 finish--;
2026 }
2027 }
2028 }
2029
2030 /* Otherwise, simply initialize the result to zero. */
2031 if (!done)
2032 emit_move_insn (dst, CONST0_RTX (outer));
2033 }
2034
2035 /* Process the pieces. */
2036 for (i = start; i < finish; i++)
2037 {
2038 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2039 enum machine_mode mode = GET_MODE (tmps[i]);
2040 unsigned int bytelen = GET_MODE_SIZE (mode);
2041 unsigned int adj_bytelen = bytelen;
2042 rtx dest = dst;
2043
2044 /* Handle trailing fragments that run over the size of the struct. */
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 adj_bytelen = ssize - bytepos;
2047
2048 if (GET_CODE (dst) == CONCAT)
2049 {
2050 if (bytepos + adj_bytelen
2051 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2052 dest = XEXP (dst, 0);
2053 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2054 {
2055 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2056 dest = XEXP (dst, 1);
2057 }
2058 else
2059 {
2060 enum machine_mode dest_mode = GET_MODE (dest);
2061 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2062
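	      /* The piece straddles both halves of the CONCAT, so it cannot
		 be stored into either half directly.  Spill it through a
		 stack temporary instead; which of the two modes the
		 temporary gets depends on their relative alignment.  The
		 copy back to ORIG_DST at the end of the function then moves
		 the result into place.  */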
2063 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2064
2065 if (GET_MODE_ALIGNMENT (dest_mode)
2066 >= GET_MODE_ALIGNMENT (tmp_mode))
2067 {
2068 dest = assign_stack_temp (dest_mode,
2069 GET_MODE_SIZE (dest_mode),
2070 0);
2071 emit_move_insn (adjust_address (dest,
2072 tmp_mode,
2073 bytepos),
2074 tmps[i]);
2075 dst = dest;
2076 }
2077 else
2078 {
2079 dest = assign_stack_temp (tmp_mode,
2080 GET_MODE_SIZE (tmp_mode),
2081 0);
2082 emit_move_insn (dest, tmps[i]);
2083 dst = adjust_address (dest, dest_mode, bytepos);
2084 }
2085 break;
2086 }
2087 }
2088
2089 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 {
2091 /* store_bit_field always takes its value from the lsb.
2092 Move the fragment to the lsb if it's not already there. */
2093 if (
2094 #ifdef BLOCK_REG_PADDING
2095 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2096 == (BYTES_BIG_ENDIAN ? upward : downward)
2097 #else
2098 BYTES_BIG_ENDIAN
2099 #endif
2100 )
2101 {
2102 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2103 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2104 build_int_cst (NULL_TREE, shift),
2105 tmps[i], 0);
2106 }
2107 bytelen = adj_bytelen;
2108 }
2109
2110 /* Optimize the access just a bit. */
2111 if (MEM_P (dest)
2112 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2113 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2114 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2115 && bytelen == GET_MODE_SIZE (mode))
2116 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2117 else
2118 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2119 mode, tmps[i]);
2120 }
2121
2122 /* Copy from the pseudo into the (probable) hard reg. */
2123 if (orig_dst != dst)
2124 emit_move_insn (orig_dst, dst);
2125 }
2126
2127 /* Generate code to copy a BLKmode object of TYPE out of a
2128 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2129 is null, a stack temporary is created. TGTBLK is returned.
2130
2131 The purpose of this routine is to handle functions that return
2132 BLKmode structures in registers. Some machines (the PA for example)
2133 want to return all small structures in registers regardless of the
2134 structure's alignment. */
2135
2136 rtx
2137 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2138 {
2139 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2140 rtx src = NULL, dst = NULL;
2141 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2142 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2143 enum machine_mode copy_mode;
2144
2145 if (tgtblk == 0)
2146 {
2147 tgtblk = assign_temp (build_qualified_type (type,
2148 (TYPE_QUALS (type)
2149 | TYPE_QUAL_CONST)),
2150 0, 1, 1);
2151 preserve_temp_slots (tgtblk);
2152 }
2153
2154 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2155 into a new pseudo which is a full word. */
2156
2157 if (GET_MODE (srcreg) != BLKmode
2158 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2159 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2160
2161 /* If the structure doesn't take up a whole number of words, see whether
2162 SRCREG is padded on the left or on the right. If it's on the left,
2163 set PADDING_CORRECTION to the number of bits to skip.
2164
2165      In most ABIs, the structure will be returned at the least significant end of
2166 the register, which translates to right padding on little-endian
2167 targets and left padding on big-endian targets. The opposite
2168 holds if the structure is returned at the most significant
2169 end of the register. */
2170 if (bytes % UNITS_PER_WORD != 0
2171 && (targetm.calls.return_in_msb (type)
2172 ? !BYTES_BIG_ENDIAN
2173 : BYTES_BIG_ENDIAN))
2174 padding_correction
2175 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2176
2177 /* Copy the structure BITSIZE bits at a time. If the target lives in
2178 memory, take care of not reading/writing past its end by selecting
2179 a copy mode suited to BITSIZE. This should always be possible given
2180 how it is computed.
2181
2182 We could probably emit more efficient code for machines which do not use
2183 strict alignment, but it doesn't seem worth the effort at the current
2184 time. */
2185
2186 copy_mode = word_mode;
2187 if (MEM_P (tgtblk))
2188 {
2189 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2190 if (mem_mode != BLKmode)
2191 copy_mode = mem_mode;
2192 }
2193
2194 for (bitpos = 0, xbitpos = padding_correction;
2195 bitpos < bytes * BITS_PER_UNIT;
2196 bitpos += bitsize, xbitpos += bitsize)
2197 {
2198 /* We need a new source operand each time xbitpos is on a
2199 word boundary and when xbitpos == padding_correction
2200 (the first time through). */
2201 if (xbitpos % BITS_PER_WORD == 0
2202 || xbitpos == padding_correction)
2203 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2204 GET_MODE (srcreg));
2205
2206 /* We need a new destination operand each time bitpos is on
2207 a word boundary. */
2208 if (bitpos % BITS_PER_WORD == 0)
2209 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210
2211 /* Use xbitpos for the source extraction (right justified) and
2212 bitpos for the destination store (left justified). */
2213 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2214 extract_bit_field (src, bitsize,
2215 xbitpos % BITS_PER_WORD, 1,
2216 NULL_RTX, copy_mode, copy_mode));
2217 }
2218
2219 return tgtblk;
2220 }
2221
2222 /* Add a USE expression for REG to the (possibly empty) list pointed
2223 to by CALL_FUSAGE. REG must denote a hard register. */
2224
2225 void
2226 use_reg (rtx *call_fusage, rtx reg)
2227 {
2228 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2229
2230 *call_fusage
2231 = gen_rtx_EXPR_LIST (VOIDmode,
2232 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2233 }
2234
2235 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2236 starting at REGNO. All of these registers must be hard registers. */
2237
2238 void
2239 use_regs (rtx *call_fusage, int regno, int nregs)
2240 {
2241 int i;
2242
2243 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2244
2245 for (i = 0; i < nregs; i++)
2246 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2247 }
2248
2249 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2250 PARALLEL REGS. This is for calls that pass values in multiple
2251 non-contiguous locations. The Irix 6 ABI has examples of this. */
2252
2253 void
2254 use_group_regs (rtx *call_fusage, rtx regs)
2255 {
2256 int i;
2257
2258 for (i = 0; i < XVECLEN (regs, 0); i++)
2259 {
2260 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2261
2262 /* A NULL entry means the parameter goes both on the stack and in
2263 registers. This can also be a MEM for targets that pass values
2264 partially on the stack and partially in registers. */
2265 if (reg != 0 && REG_P (reg))
2266 use_reg (call_fusage, reg);
2267 }
2268 }
2269
2270 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2271    assignment and the code of the expression on the RHS is CODE. Return
2272 NULL otherwise. */
2273
2274 static gimple
2275 get_def_for_expr (tree name, enum tree_code code)
2276 {
2277 gimple def_stmt;
2278
2279 if (TREE_CODE (name) != SSA_NAME)
2280 return NULL;
2281
2282 def_stmt = get_gimple_for_ssa_name (name);
2283 if (!def_stmt
2284 || gimple_assign_rhs_code (def_stmt) != code)
2285 return NULL;
2286
2287 return def_stmt;
2288 }
2289 \f
2290
2291 /* Determine whether the LEN bytes generated by CONSTFUN can be
2292 stored to memory using several move instructions. CONSTFUNDATA is
2293 a pointer which will be passed as argument in every CONSTFUN call.
2294 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2295 a memset operation and false if it's a copy of a constant string.
2296 Return nonzero if a call to store_by_pieces should succeed. */
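
/* clear_by_pieces_1, defined further down in this file, is the simplest
   example of such a CONSTFUN: it ignores its arguments and always
   returns const0_rtx.  */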
2297
2298 int
2299 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2300 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2301 void *constfundata, unsigned int align, bool memsetp)
2302 {
2303 unsigned HOST_WIDE_INT l;
2304 unsigned int max_size;
2305 HOST_WIDE_INT offset = 0;
2306 enum machine_mode mode, tmode;
2307 enum insn_code icode;
2308 int reverse;
2309 rtx cst;
2310
2311 if (len == 0)
2312 return 1;
2313
2314 if (! (memsetp
2315 ? SET_BY_PIECES_P (len, align)
2316 : STORE_BY_PIECES_P (len, align)))
2317 return 0;
2318
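  /* Cap ALIGN at the alignment of the widest mode usable for storing by
     pieces; failing that, raise it to the alignment of the widest mode
     that is still fast to access unaligned at the given ALIGN.  */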
2319 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2320 if (align >= GET_MODE_ALIGNMENT (tmode))
2321 align = GET_MODE_ALIGNMENT (tmode);
2322 else
2323 {
2324 enum machine_mode xmode;
2325
2326 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2327 tmode != VOIDmode;
2328 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2329 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2330 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 break;
2332
2333 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 }
2335
2336 /* We would first store what we can in the largest integer mode, then go to
2337 successively smaller modes. */
2338
2339 for (reverse = 0;
2340 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2341 reverse++)
2342 {
2343 l = len;
2344 mode = VOIDmode;
2345 max_size = STORE_MAX_PIECES + 1;
2346 while (max_size > 1)
2347 {
2348 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2349 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2350 if (GET_MODE_SIZE (tmode) < max_size)
2351 mode = tmode;
2352
2353 if (mode == VOIDmode)
2354 break;
2355
2356 icode = optab_handler (mov_optab, mode)->insn_code;
2357 if (icode != CODE_FOR_nothing
2358 && align >= GET_MODE_ALIGNMENT (mode))
2359 {
2360 unsigned int size = GET_MODE_SIZE (mode);
2361
2362 while (l >= size)
2363 {
2364 if (reverse)
2365 offset -= size;
2366
2367 cst = (*constfun) (constfundata, offset, mode);
2368 if (!LEGITIMATE_CONSTANT_P (cst))
2369 return 0;
2370
2371 if (!reverse)
2372 offset += size;
2373
2374 l -= size;
2375 }
2376 }
2377
2378 max_size = GET_MODE_SIZE (mode);
2379 }
2380
2381 /* The code above should have handled everything. */
2382 gcc_assert (!l);
2383 }
2384
2385 return 1;
2386 }
2387
2388 /* Generate several move instructions to store LEN bytes generated by
2389 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2390 pointer which will be passed as argument in every CONSTFUN call.
2391 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2392 a memset operation and false if it's a copy of a constant string.
2393    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2394    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2395 stpcpy. */
2396
2397 rtx
2398 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2399 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2400 void *constfundata, unsigned int align, bool memsetp, int endp)
2401 {
2402 enum machine_mode to_addr_mode
2403 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2404 struct store_by_pieces_d data;
2405
2406 if (len == 0)
2407 {
2408 gcc_assert (endp != 2);
2409 return to;
2410 }
2411
2412 gcc_assert (memsetp
2413 ? SET_BY_PIECES_P (len, align)
2414 : STORE_BY_PIECES_P (len, align));
2415 data.constfun = constfun;
2416 data.constfundata = constfundata;
2417 data.len = len;
2418 data.to = to;
2419 store_by_pieces_1 (&data, align);
2420 if (endp)
2421 {
2422 rtx to1;
2423
2424 gcc_assert (!data.reverse);
2425 if (data.autinc_to)
2426 {
2427 if (endp == 2)
2428 {
2429 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2430 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2431 else
2432 data.to_addr = copy_to_mode_reg (to_addr_mode,
2433 plus_constant (data.to_addr,
2434 -1));
2435 }
2436 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2437 data.offset);
2438 }
2439 else
2440 {
2441 if (endp == 2)
2442 --data.offset;
2443 to1 = adjust_address (data.to, QImode, data.offset);
2444 }
2445 return to1;
2446 }
2447 else
2448 return data.to;
2449 }
2450
2451 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2452 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2453
2454 static void
2455 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2456 {
2457 struct store_by_pieces_d data;
2458
2459 if (len == 0)
2460 return;
2461
2462 data.constfun = clear_by_pieces_1;
2463 data.constfundata = NULL;
2464 data.len = len;
2465 data.to = to;
2466 store_by_pieces_1 (&data, align);
2467 }
2468
2469 /* Callback routine for clear_by_pieces.
2470 Return const0_rtx unconditionally. */
2471
2472 static rtx
2473 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2474 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2475 enum machine_mode mode ATTRIBUTE_UNUSED)
2476 {
2477 return const0_rtx;
2478 }
2479
2480 /* Subroutine of clear_by_pieces and store_by_pieces.
2481 Generate several move instructions to store LEN bytes of block TO. (A MEM
2482 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2483
2484 static void
2485 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2486 unsigned int align ATTRIBUTE_UNUSED)
2487 {
2488 enum machine_mode to_addr_mode
2489 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2490 rtx to_addr = XEXP (data->to, 0);
2491 unsigned int max_size = STORE_MAX_PIECES + 1;
2492 enum machine_mode mode = VOIDmode, tmode;
2493 enum insn_code icode;
2494
2495 data->offset = 0;
2496 data->to_addr = to_addr;
2497 data->autinc_to
2498 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2499 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2500
2501 data->explicit_inc_to = 0;
2502 data->reverse
2503 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2504 if (data->reverse)
2505 data->offset = data->len;
2506
2507 /* If storing requires more than two move insns,
2508 copy addresses to registers (to make displacements shorter)
2509 and use post-increment if available. */
2510 if (!data->autinc_to
2511 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2512 {
2513 /* Determine the main mode we'll be using. */
2514 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2515 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2516 if (GET_MODE_SIZE (tmode) < max_size)
2517 mode = tmode;
2518
2519 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2520 {
2521 data->to_addr = copy_to_mode_reg (to_addr_mode,
2522 plus_constant (to_addr, data->len));
2523 data->autinc_to = 1;
2524 data->explicit_inc_to = -1;
2525 }
2526
2527 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2528 && ! data->autinc_to)
2529 {
2530 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2531 data->autinc_to = 1;
2532 data->explicit_inc_to = 1;
2533 }
2534
2535 if ( !data->autinc_to && CONSTANT_P (to_addr))
2536 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2537 }
2538
2539 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2540 if (align >= GET_MODE_ALIGNMENT (tmode))
2541 align = GET_MODE_ALIGNMENT (tmode);
2542 else
2543 {
2544 enum machine_mode xmode;
2545
2546 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2547 tmode != VOIDmode;
2548 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2549 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2550 || SLOW_UNALIGNED_ACCESS (tmode, align))
2551 break;
2552
2553 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2554 }
2555
2556 /* First store what we can in the largest integer mode, then go to
2557 successively smaller modes. */
2558
2559 while (max_size > 1)
2560 {
2561 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2562 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2563 if (GET_MODE_SIZE (tmode) < max_size)
2564 mode = tmode;
2565
2566 if (mode == VOIDmode)
2567 break;
2568
2569 icode = optab_handler (mov_optab, mode)->insn_code;
2570 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2571 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2572
2573 max_size = GET_MODE_SIZE (mode);
2574 }
2575
2576 /* The code above should have handled everything. */
2577 gcc_assert (!data->len);
2578 }
2579
2580 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2581 with move instructions for mode MODE. GENFUN is the gen_... function
2582 to make a move insn for that mode. DATA has all the other info. */
2583
2584 static void
2585 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2586 struct store_by_pieces_d *data)
2587 {
2588 unsigned int size = GET_MODE_SIZE (mode);
2589 rtx to1, cst;
2590
2591 while (data->len >= size)
2592 {
2593 if (data->reverse)
2594 data->offset -= size;
2595
2596 if (data->autinc_to)
2597 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2598 data->offset);
2599 else
2600 to1 = adjust_address (data->to, mode, data->offset);
2601
2602 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2603 emit_insn (gen_add2_insn (data->to_addr,
2604 GEN_INT (-(HOST_WIDE_INT) size)));
2605
2606 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2607 emit_insn ((*genfun) (to1, cst));
2608
2609 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2610 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2611
2612 if (! data->reverse)
2613 data->offset += size;
2614
2615 data->len -= size;
2616 }
2617 }
2618 \f
2619 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2620 its length in bytes. */
2621
2622 rtx
2623 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2624 unsigned int expected_align, HOST_WIDE_INT expected_size)
2625 {
2626 enum machine_mode mode = GET_MODE (object);
2627 unsigned int align;
2628
2629 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2630
2631 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2632 just move a zero. Otherwise, do this a piece at a time. */
2633 if (mode != BLKmode
2634 && CONST_INT_P (size)
2635 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2636 {
2637 rtx zero = CONST0_RTX (mode);
2638 if (zero != NULL)
2639 {
2640 emit_move_insn (object, zero);
2641 return NULL;
2642 }
2643
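      /* There is no single zero constant for this mode, but a complex
	 mode can still be cleared by writing zero to its real and
	 imaginary parts separately.  */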
2644 if (COMPLEX_MODE_P (mode))
2645 {
2646 zero = CONST0_RTX (GET_MODE_INNER (mode));
2647 if (zero != NULL)
2648 {
2649 write_complex_part (object, zero, 0);
2650 write_complex_part (object, zero, 1);
2651 return NULL;
2652 }
2653 }
2654 }
2655
2656 if (size == const0_rtx)
2657 return NULL;
2658
2659 align = MEM_ALIGN (object);
2660
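  /* Try clearing by pieces when SIZE is a suitable constant, then a
     target setmem pattern, and finally fall back to a memset libcall,
     which is only attempted in the generic address space.  */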
2661 if (CONST_INT_P (size)
2662 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2663 clear_by_pieces (object, INTVAL (size), align);
2664 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2665 expected_align, expected_size))
2666 ;
2667 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2668 return set_storage_via_libcall (object, size, const0_rtx,
2669 method == BLOCK_OP_TAILCALL);
2670 else
2671 gcc_unreachable ();
2672
2673 return NULL;
2674 }
2675
2676 rtx
2677 clear_storage (rtx object, rtx size, enum block_op_methods method)
2678 {
2679 return clear_storage_hints (object, size, method, 0, -1);
2680 }
2681
2682
2683 /* A subroutine of clear_storage. Expand a call to memset.
2684 Return the return value of memset, 0 otherwise. */
2685
2686 rtx
2687 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2688 {
2689 tree call_expr, fn, object_tree, size_tree, val_tree;
2690 enum machine_mode size_mode;
2691 rtx retval;
2692
2693 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2694    place those new pseudos into a VAR_DECL and use them later. */
2695
2696 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2697
2698 size_mode = TYPE_MODE (sizetype);
2699 size = convert_to_mode (size_mode, size, 1);
2700 size = copy_to_mode_reg (size_mode, size);
2701
2702 /* It is incorrect to use the libcall calling conventions to call
2703 memset in this context. This could be a user call to memset and
2704 the user may wish to examine the return value from memset. For
2705 targets where libcalls and normal calls have different conventions
2706 for returning pointers, we could end up generating incorrect code. */
2707
2708 object_tree = make_tree (ptr_type_node, object);
2709 if (!CONST_INT_P (val))
2710 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2711 size_tree = make_tree (sizetype, size);
2712 val_tree = make_tree (integer_type_node, val);
2713
2714 fn = clear_storage_libcall_fn (true);
2715 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2716 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2717
2718 retval = expand_normal (call_expr);
2719
2720 return retval;
2721 }
2722
2723 /* A subroutine of set_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2726
2727 tree block_clear_fn;
2728
2729 void
2730 init_block_clear_fn (const char *asmspec)
2731 {
2732 if (!block_clear_fn)
2733 {
2734 tree fn, args;
2735
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2740
2741 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2742 DECL_EXTERNAL (fn) = 1;
2743 TREE_PUBLIC (fn) = 1;
2744 DECL_ARTIFICIAL (fn) = 1;
2745 TREE_NOTHROW (fn) = 1;
2746 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2747 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2748
2749 block_clear_fn = fn;
2750 }
2751
2752 if (asmspec)
2753 set_user_assembler_name (block_clear_fn, asmspec);
2754 }
2755
2756 static tree
2757 clear_storage_libcall_fn (int for_call)
2758 {
2759 static bool emitted_extern;
2760
2761 if (!block_clear_fn)
2762 init_block_clear_fn (NULL);
2763
2764 if (for_call && !emitted_extern)
2765 {
2766 emitted_extern = true;
2767 make_decl_rtl (block_clear_fn);
2768 assemble_external (block_clear_fn);
2769 }
2770
2771 return block_clear_fn;
2772 }
2773 \f
2774 /* Expand a setmem pattern; return true if successful. */
2775
2776 bool
2777 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2778 unsigned int expected_align, HOST_WIDE_INT expected_size)
2779 {
2780 /* Try the most limited insn first, because there's no point
2781 including more than one in the machine description unless
2782 the more limited one has some advantage. */
2783
2784 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2785 enum machine_mode mode;
2786
2787 if (expected_align < align)
2788 expected_align = align;
2789
2790 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2791 mode = GET_MODE_WIDER_MODE (mode))
2792 {
2793 enum insn_code code = setmem_optab[(int) mode];
2794 insn_operand_predicate_fn pred;
2795
2796 if (code != CODE_FOR_nothing
2797 /* We don't need MODE to be narrower than
2798 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2799 the mode mask, as it is returned by the macro, it will
2800 definitely be less than the actual mode mask. */
2801 && ((CONST_INT_P (size)
2802 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2803 <= (GET_MODE_MASK (mode) >> 1)))
2804 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2805 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2806 || (*pred) (object, BLKmode))
2807 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2808 || (*pred) (opalign, VOIDmode)))
2809 {
2810 rtx opsize, opchar;
2811 enum machine_mode char_mode;
2812 rtx last = get_last_insn ();
2813 rtx pat;
2814
2815 opsize = convert_to_mode (mode, size, 1);
2816 pred = insn_data[(int) code].operand[1].predicate;
2817 if (pred != 0 && ! (*pred) (opsize, mode))
2818 opsize = copy_to_mode_reg (mode, opsize);
2819
2820 opchar = val;
2821 char_mode = insn_data[(int) code].operand[2].mode;
2822 if (char_mode != VOIDmode)
2823 {
2824 opchar = convert_to_mode (char_mode, opchar, 1);
2825 pred = insn_data[(int) code].operand[2].predicate;
2826 if (pred != 0 && ! (*pred) (opchar, char_mode))
2827 opchar = copy_to_mode_reg (char_mode, opchar);
2828 }
2829
2830 if (insn_data[(int) code].n_operands == 4)
2831 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2832 else
2833 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2834 GEN_INT (expected_align
2835 / BITS_PER_UNIT),
2836 GEN_INT (expected_size));
2837 if (pat)
2838 {
2839 emit_insn (pat);
2840 return true;
2841 }
2842 else
2843 delete_insns_since (last);
2844 }
2845 }
2846
2847 return false;
2848 }
2849
2850 \f
2851 /* Write to one of the components of the complex value CPLX. Write VAL to
2852    the real part if IMAG_P is false, and the imaginary part if it's true. */
2853
2854 static void
2855 write_complex_part (rtx cplx, rtx val, bool imag_p)
2856 {
2857 enum machine_mode cmode;
2858 enum machine_mode imode;
2859 unsigned ibitsize;
2860
2861 if (GET_CODE (cplx) == CONCAT)
2862 {
2863 emit_move_insn (XEXP (cplx, imag_p), val);
2864 return;
2865 }
2866
2867 cmode = GET_MODE (cplx);
2868 imode = GET_MODE_INNER (cmode);
2869 ibitsize = GET_MODE_BITSIZE (imode);
2870
2871 /* For MEMs simplify_gen_subreg may generate an invalid new address
2872 because, e.g., the original address is considered mode-dependent
2873 by the target, which restricts simplify_subreg from invoking
2874 adjust_address_nv. Instead of preparing fallback support for an
2875 invalid address, we call adjust_address_nv directly. */
2876 if (MEM_P (cplx))
2877 {
2878 emit_move_insn (adjust_address_nv (cplx, imode,
2879 imag_p ? GET_MODE_SIZE (imode) : 0),
2880 val);
2881 return;
2882 }
2883
2884 /* If the sub-object is at least word sized, then we know that subregging
2885 will work. This special case is important, since store_bit_field
2886 wants to operate on integer modes, and there's rarely an OImode to
2887 correspond to TCmode. */
2888 if (ibitsize >= BITS_PER_WORD
2889 /* For hard regs we have exact predicates. Assume we can split
2890 the original object if it spans an even number of hard regs.
2891 This special case is important for SCmode on 64-bit platforms
2892 where the natural size of floating-point regs is 32-bit. */
2893 || (REG_P (cplx)
2894 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2895 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2896 {
2897 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2898 imag_p ? GET_MODE_SIZE (imode) : 0);
2899 if (part)
2900 {
2901 emit_move_insn (part, val);
2902 return;
2903 }
2904 else
2905 /* simplify_gen_subreg may fail for sub-word MEMs. */
2906 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2907 }
2908
2909 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2910 }
2911
2912 /* Extract one of the components of the complex value CPLX. Extract the
2913 real part if IMAG_P is false, and the imaginary part if it's true. */
2914
2915 static rtx
2916 read_complex_part (rtx cplx, bool imag_p)
2917 {
2918 enum machine_mode cmode, imode;
2919 unsigned ibitsize;
2920
2921 if (GET_CODE (cplx) == CONCAT)
2922 return XEXP (cplx, imag_p);
2923
2924 cmode = GET_MODE (cplx);
2925 imode = GET_MODE_INNER (cmode);
2926 ibitsize = GET_MODE_BITSIZE (imode);
2927
2928 /* Special case reads from complex constants that got spilled to memory. */
2929 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2930 {
2931 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2932 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2933 {
2934 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2935 if (CONSTANT_CLASS_P (part))
2936 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2937 }
2938 }
2939
2940 /* For MEMs simplify_gen_subreg may generate an invalid new address
2941 because, e.g., the original address is considered mode-dependent
2942 by the target, which restricts simplify_subreg from invoking
2943 adjust_address_nv. Instead of preparing fallback support for an
2944 invalid address, we call adjust_address_nv directly. */
2945 if (MEM_P (cplx))
2946 return adjust_address_nv (cplx, imode,
2947 imag_p ? GET_MODE_SIZE (imode) : 0);
2948
2949 /* If the sub-object is at least word sized, then we know that subregging
2950 will work. This special case is important, since extract_bit_field
2951 wants to operate on integer modes, and there's rarely an OImode to
2952 correspond to TCmode. */
2953 if (ibitsize >= BITS_PER_WORD
2954 /* For hard regs we have exact predicates. Assume we can split
2955 the original object if it spans an even number of hard regs.
2956 This special case is important for SCmode on 64-bit platforms
2957 where the natural size of floating-point regs is 32-bit. */
2958 || (REG_P (cplx)
2959 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2960 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2961 {
2962 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2963 imag_p ? GET_MODE_SIZE (imode) : 0);
2964 if (ret)
2965 return ret;
2966 else
2967 /* simplify_gen_subreg may fail for sub-word MEMs. */
2968 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2969 }
2970
2971 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2972 true, NULL_RTX, imode, imode);
2973 }
2974 \f
2975 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2976 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2977 represented in NEW_MODE. If FORCE is true, this will never happen, as
2978 we'll force-create a SUBREG if needed. */
2979
2980 static rtx
2981 emit_move_change_mode (enum machine_mode new_mode,
2982 enum machine_mode old_mode, rtx x, bool force)
2983 {
2984 rtx ret;
2985
2986 if (push_operand (x, GET_MODE (x)))
2987 {
2988 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2989 MEM_COPY_ATTRIBUTES (ret, x);
2990 }
2991 else if (MEM_P (x))
2992 {
2993 /* We don't have to worry about changing the address since the
2994 size in bytes is supposed to be the same. */
2995 if (reload_in_progress)
2996 {
2997 /* Copy the MEM to change the mode and move any
2998 substitutions from the old MEM to the new one. */
2999 ret = adjust_address_nv (x, new_mode, 0);
3000 copy_replacements (x, ret);
3001 }
3002 else
3003 ret = adjust_address (x, new_mode, 0);
3004 }
3005 else
3006 {
3007 /* Note that we do want simplify_subreg's behavior of validating
3008 that the new mode is ok for a hard register. If we were to use
3009 simplify_gen_subreg, we would create the subreg, but would
3010 probably run into the target not being able to implement it. */
3011 /* Except, of course, when FORCE is true, when this is exactly what
3012 we want. Which is needed for CCmodes on some targets. */
3013 if (force)
3014 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3015 else
3016 ret = simplify_subreg (new_mode, x, old_mode, 0);
3017 }
3018
3019 return ret;
3020 }
3021
3022 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3023 an integer mode of the same size as MODE. Returns the instruction
3024 emitted, or NULL if such a move could not be generated. */
3025
3026 static rtx
3027 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3028 {
3029 enum machine_mode imode;
3030 enum insn_code code;
3031
3032 /* There must exist a mode of the exact size we require. */
3033 imode = int_mode_for_mode (mode);
3034 if (imode == BLKmode)
3035 return NULL_RTX;
3036
3037 /* The target must support moves in this mode. */
3038 code = optab_handler (mov_optab, imode)->insn_code;
3039 if (code == CODE_FOR_nothing)
3040 return NULL_RTX;
3041
3042 x = emit_move_change_mode (imode, mode, x, force);
3043 if (x == NULL_RTX)
3044 return NULL_RTX;
3045 y = emit_move_change_mode (imode, mode, y, force);
3046 if (y == NULL_RTX)
3047 return NULL_RTX;
3048 return emit_insn (GEN_FCN (code) (x, y));
3049 }
3050
3051 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3052 Return an equivalent MEM that does not use an auto-increment. */
3053
3054 static rtx
3055 emit_move_resolve_push (enum machine_mode mode, rtx x)
3056 {
3057 enum rtx_code code = GET_CODE (XEXP (x, 0));
3058 HOST_WIDE_INT adjust;
3059 rtx temp;
3060
3061 adjust = GET_MODE_SIZE (mode);
3062 #ifdef PUSH_ROUNDING
3063 adjust = PUSH_ROUNDING (adjust);
3064 #endif
3065 if (code == PRE_DEC || code == POST_DEC)
3066 adjust = -adjust;
3067 else if (code == PRE_MODIFY || code == POST_MODIFY)
3068 {
3069 rtx expr = XEXP (XEXP (x, 0), 1);
3070 HOST_WIDE_INT val;
3071
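	  /* For {PRE,POST}_MODIFY the adjustment is given explicitly as the
	     constant operand of the PLUS or MINUS; it must agree, up to
	     sign, with the rounded push size computed above.  */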
3072 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3073 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3074 val = INTVAL (XEXP (expr, 1));
3075 if (GET_CODE (expr) == MINUS)
3076 val = -val;
3077 gcc_assert (adjust == val || adjust == -val);
3078 adjust = val;
3079 }
3080
3081 /* Do not use anti_adjust_stack, since we don't want to update
3082 stack_pointer_delta. */
3083 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3084 GEN_INT (adjust), stack_pointer_rtx,
3085 0, OPTAB_LIB_WIDEN);
3086 if (temp != stack_pointer_rtx)
3087 emit_move_insn (stack_pointer_rtx, temp);
3088
3089 switch (code)
3090 {
3091 case PRE_INC:
3092 case PRE_DEC:
3093 case PRE_MODIFY:
3094 temp = stack_pointer_rtx;
3095 break;
3096 case POST_INC:
3097 case POST_DEC:
3098 case POST_MODIFY:
3099 temp = plus_constant (stack_pointer_rtx, -adjust);
3100 break;
3101 default:
3102 gcc_unreachable ();
3103 }
3104
3105 return replace_equiv_address (x, temp);
3106 }
3107
3108 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3109 X is known to satisfy push_operand, and MODE is known to be complex.
3110 Returns the last instruction emitted. */
3111
3112 rtx
3113 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3114 {
3115 enum machine_mode submode = GET_MODE_INNER (mode);
3116 bool imag_first;
3117
3118 #ifdef PUSH_ROUNDING
3119 unsigned int submodesize = GET_MODE_SIZE (submode);
3120
3121   /* If we push to the stack but the size is smaller than what the
3122      machine can push exactly, we must fall back to move instructions. */
3123 if (PUSH_ROUNDING (submodesize) != submodesize)
3124 {
3125 x = emit_move_resolve_push (mode, x);
3126 return emit_move_insn (x, y);
3127 }
3128 #endif
3129
3130 /* Note that the real part always precedes the imag part in memory
3131 regardless of machine's endianness. */
3132 switch (GET_CODE (XEXP (x, 0)))
3133 {
3134 case PRE_DEC:
3135 case POST_DEC:
3136 imag_first = true;
3137 break;
3138 case PRE_INC:
3139 case POST_INC:
3140 imag_first = false;
3141 break;
3142 default:
3143 gcc_unreachable ();
3144 }
3145
3146 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3147 read_complex_part (y, imag_first));
3148 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3149 read_complex_part (y, !imag_first));
3150 }
3151
3152 /* A subroutine of emit_move_complex. Perform the move from Y to X
3153 via two moves of the parts. Returns the last instruction emitted. */
3154
3155 rtx
3156 emit_move_complex_parts (rtx x, rtx y)
3157 {
3158 /* Show the output dies here. This is necessary for SUBREGs
3159 of pseudos since we cannot track their lifetimes correctly;
3160 hard regs shouldn't appear here except as return values. */
3161 if (!reload_completed && !reload_in_progress
3162 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3163 emit_clobber (x);
3164
3165 write_complex_part (x, read_complex_part (y, false), false);
3166 write_complex_part (x, read_complex_part (y, true), true);
3167
3168 return get_last_insn ();
3169 }
3170
3171 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3172 MODE is known to be complex. Returns the last instruction emitted. */
3173
3174 static rtx
3175 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3176 {
3177 bool try_int;
3178
3179 /* Need to take special care for pushes, to maintain proper ordering
3180 of the data, and possibly extra padding. */
3181 if (push_operand (x, mode))
3182 return emit_move_complex_push (mode, x, y);
3183
3184 /* See if we can coerce the target into moving both values at once. */
3185
3186 /* Move floating point as parts. */
3187 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3188 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3189 try_int = false;
3190 /* Not possible if the values are inherently not adjacent. */
3191 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3192 try_int = false;
3193 /* Is possible if both are registers (or subregs of registers). */
3194 else if (register_operand (x, mode) && register_operand (y, mode))
3195 try_int = true;
3196 /* If one of the operands is a memory, and alignment constraints
3197 are friendly enough, we may be able to do combined memory operations.
3198 We do not attempt this if Y is a constant because that combination is
3199 usually better with the by-parts thing below. */
3200 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3201 && (!STRICT_ALIGNMENT
3202 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3203 try_int = true;
3204 else
3205 try_int = false;
3206
3207 if (try_int)
3208 {
3209 rtx ret;
3210
3211 /* For memory to memory moves, optimal behavior can be had with the
3212 existing block move logic. */
3213 if (MEM_P (x) && MEM_P (y))
3214 {
3215 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3216 BLOCK_OP_NO_LIBCALL);
3217 return get_last_insn ();
3218 }
3219
3220 ret = emit_move_via_integer (mode, x, y, true);
3221 if (ret)
3222 return ret;
3223 }
3224
3225 return emit_move_complex_parts (x, y);
3226 }
3227
3228 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3229 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3230
3231 static rtx
3232 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3233 {
3234 rtx ret;
3235
3236 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3237 if (mode != CCmode)
3238 {
3239 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3240 if (code != CODE_FOR_nothing)
3241 {
3242 x = emit_move_change_mode (CCmode, mode, x, true);
3243 y = emit_move_change_mode (CCmode, mode, y, true);
3244 return emit_insn (GEN_FCN (code) (x, y));
3245 }
3246 }
3247
3248 /* Otherwise, find the MODE_INT mode of the same width. */
3249 ret = emit_move_via_integer (mode, x, y, false);
3250 gcc_assert (ret != NULL);
3251 return ret;
3252 }
3253
3254 /* Return true if word I of OP lies entirely in the
3255 undefined bits of a paradoxical subreg. */
3256
3257 static bool
3258 undefined_operand_subword_p (const_rtx op, int i)
3259 {
3260 enum machine_mode innermode, innermostmode;
3261 int offset;
3262 if (GET_CODE (op) != SUBREG)
3263 return false;
3264 innermode = GET_MODE (op);
3265 innermostmode = GET_MODE (SUBREG_REG (op));
3266 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3267   /* The SUBREG_BYTE represents the offset, as if the value were stored in
3268 memory, except for a paradoxical subreg where we define
3269 SUBREG_BYTE to be 0; undo this exception as in
3270 simplify_subreg. */
3271 if (SUBREG_BYTE (op) == 0
3272 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3273 {
3274 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3275 if (WORDS_BIG_ENDIAN)
3276 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3277 if (BYTES_BIG_ENDIAN)
3278 offset += difference % UNITS_PER_WORD;
3279 }
3280 if (offset >= GET_MODE_SIZE (innermostmode)
3281 || offset <= -GET_MODE_SIZE (word_mode))
3282 return true;
3283 return false;
3284 }
3285
3286 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3287 MODE is any multi-word or full-word mode that lacks a move_insn
3288 pattern. Note that you will get better code if you define such
3289 patterns, even if they must turn into multiple assembler instructions. */
3290
3291 static rtx
3292 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3293 {
3294 rtx last_insn = 0;
3295 rtx seq, inner;
3296 bool need_clobber;
3297 int i;
3298
3299 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3300
3301 /* If X is a push on the stack, do the push now and replace
3302 X with a reference to the stack pointer. */
3303 if (push_operand (x, mode))
3304 x = emit_move_resolve_push (mode, x);
3305
3306 /* If we are in reload, see if either operand is a MEM whose address
3307 is scheduled for replacement. */
3308 if (reload_in_progress && MEM_P (x)
3309 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3310 x = replace_equiv_address_nv (x, inner);
3311 if (reload_in_progress && MEM_P (y)
3312 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3313 y = replace_equiv_address_nv (y, inner);
3314
3315 start_sequence ();
3316
3317 need_clobber = false;
3318 for (i = 0;
3319 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3320 i++)
3321 {
3322 rtx xpart = operand_subword (x, i, 1, mode);
3323 rtx ypart;
3324
3325 /* Do not generate code for a move if it would come entirely
3326 from the undefined bits of a paradoxical subreg. */
3327 if (undefined_operand_subword_p (y, i))
3328 continue;
3329
3330 ypart = operand_subword (y, i, 1, mode);
3331
3332 /* If we can't get a part of Y, put Y into memory if it is a
3333 constant. Otherwise, force it into a register. Then we must
3334 be able to get a part of Y. */
3335 if (ypart == 0 && CONSTANT_P (y))
3336 {
3337 y = use_anchored_address (force_const_mem (mode, y));
3338 ypart = operand_subword (y, i, 1, mode);
3339 }
3340 else if (ypart == 0)
3341 ypart = operand_subword_force (y, i, mode);
3342
3343 gcc_assert (xpart && ypart);
3344
3345 need_clobber |= (GET_CODE (xpart) == SUBREG);
3346
3347 last_insn = emit_move_insn (xpart, ypart);
3348 }
3349
3350 seq = get_insns ();
3351 end_sequence ();
3352
3353 /* Show the output dies here. This is necessary for SUBREGs
3354 of pseudos since we cannot track their lifetimes correctly;
3355 hard regs shouldn't appear here except as return values.
3356 We never want to emit such a clobber after reload. */
3357 if (x != y
3358 && ! (reload_in_progress || reload_completed)
3359 && need_clobber != 0)
3360 emit_clobber (x);
3361
3362 emit_insn (seq);
3363
3364 return last_insn;
3365 }
3366
3367 /* Low level part of emit_move_insn.
3368 Called just like emit_move_insn, but assumes X and Y
3369 are basically valid. */
3370
3371 rtx
3372 emit_move_insn_1 (rtx x, rtx y)
3373 {
3374 enum machine_mode mode = GET_MODE (x);
3375 enum insn_code code;
3376
3377 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3378
3379 code = optab_handler (mov_optab, mode)->insn_code;
3380 if (code != CODE_FOR_nothing)
3381 return emit_insn (GEN_FCN (code) (x, y));
3382
3383 /* Expand complex moves by moving real part and imag part. */
3384 if (COMPLEX_MODE_P (mode))
3385 return emit_move_complex (mode, x, y);
3386
3387 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3388 || ALL_FIXED_POINT_MODE_P (mode))
3389 {
3390 rtx result = emit_move_via_integer (mode, x, y, true);
3391
3392 /* If we can't find an integer mode, use multi words. */
3393 if (result)
3394 return result;
3395 else
3396 return emit_move_multi_word (mode, x, y);
3397 }
3398
3399 if (GET_MODE_CLASS (mode) == MODE_CC)
3400 return emit_move_ccmode (mode, x, y);
3401
3402 /* Try using a move pattern for the corresponding integer mode. This is
3403 only safe when simplify_subreg can convert MODE constants into integer
3404 constants. At present, it can only do this reliably if the value
3405 fits within a HOST_WIDE_INT. */
3406 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3407 {
3408 rtx ret = emit_move_via_integer (mode, x, y, false);
3409 if (ret)
3410 return ret;
3411 }
3412
3413 return emit_move_multi_word (mode, x, y);
3414 }
3415
3416 /* Generate code to copy Y into X.
3417 Both Y and X must have the same mode, except that
3418 Y can be a constant with VOIDmode.
3419 This mode cannot be BLKmode; use emit_block_move for that.
3420
3421 Return the last instruction emitted. */
3422
3423 rtx
3424 emit_move_insn (rtx x, rtx y)
3425 {
3426 enum machine_mode mode = GET_MODE (x);
3427 rtx y_cst = NULL_RTX;
3428 rtx last_insn, set;
3429
3430 gcc_assert (mode != BLKmode
3431 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3432
3433 if (CONSTANT_P (y))
3434 {
3435 if (optimize
3436 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3437 && (last_insn = compress_float_constant (x, y)))
3438 return last_insn;
3439
3440 y_cst = y;
3441
3442 if (!LEGITIMATE_CONSTANT_P (y))
3443 {
3444 y = force_const_mem (mode, y);
3445
3446 /* If the target's cannot_force_const_mem prevented the spill,
3447 assume that the target's move expanders will also take care
3448 of the non-legitimate constant. */
3449 if (!y)
3450 y = y_cst;
3451 else
3452 y = use_anchored_address (y);
3453 }
3454 }
3455
3456 /* If X or Y are memory references, verify that their addresses are valid
3457 for the machine. */
3458 if (MEM_P (x)
3459 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3460 MEM_ADDR_SPACE (x))
3461 && ! push_operand (x, GET_MODE (x))))
3462 x = validize_mem (x);
3463
3464 if (MEM_P (y)
3465 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3466 MEM_ADDR_SPACE (y)))
3467 y = validize_mem (y);
3468
3469 gcc_assert (mode != BLKmode);
3470
3471 last_insn = emit_move_insn_1 (x, y);
3472
3473 if (y_cst && REG_P (x)
3474 && (set = single_set (last_insn)) != NULL_RTX
3475 && SET_DEST (set) == x
3476 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3477 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3478
3479 return last_insn;
3480 }
3481
3482 /* If Y is representable exactly in a narrower mode, and the target can
3483 perform the extension directly from constant or memory, then emit the
3484 move as an extension. */
3485
3486 static rtx
3487 compress_float_constant (rtx x, rtx y)
3488 {
3489 enum machine_mode dstmode = GET_MODE (x);
3490 enum machine_mode orig_srcmode = GET_MODE (y);
3491 enum machine_mode srcmode;
3492 REAL_VALUE_TYPE r;
3493 int oldcost, newcost;
3494 bool speed = optimize_insn_for_speed_p ();
3495
3496 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3497
3498 if (LEGITIMATE_CONSTANT_P (y))
3499 oldcost = rtx_cost (y, SET, speed);
3500 else
3501 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3502
3503 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3504 srcmode != orig_srcmode;
3505 srcmode = GET_MODE_WIDER_MODE (srcmode))
3506 {
3507 enum insn_code ic;
3508 rtx trunc_y, last_insn;
3509
3510 /* Skip if the target can't extend this way. */
3511 ic = can_extend_p (dstmode, srcmode, 0);
3512 if (ic == CODE_FOR_nothing)
3513 continue;
3514
3515 /* Skip if the narrowed value isn't exact. */
3516 if (! exact_real_truncate (srcmode, &r))
3517 continue;
3518
3519 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3520
3521 if (LEGITIMATE_CONSTANT_P (trunc_y))
3522 {
3523 /* Skip if the target needs extra instructions to perform
3524 the extension. */
3525 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3526 continue;
3527 /* This is valid, but may not be cheaper than the original. */
3528 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3529 if (oldcost < newcost)
3530 continue;
3531 }
3532 else if (float_extend_from_mem[dstmode][srcmode])
3533 {
3534 trunc_y = force_const_mem (srcmode, trunc_y);
3535 /* This is valid, but may not be cheaper than the original. */
3536 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3537 if (oldcost < newcost)
3538 continue;
3539 trunc_y = validize_mem (trunc_y);
3540 }
3541 else
3542 continue;
3543
3544 /* For CSE's benefit, force the compressed constant pool entry
3545 into a new pseudo. This constant may be used in different modes,
3546 and if not, combine will put things back together for us. */
3547 trunc_y = force_reg (srcmode, trunc_y);
3548 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3549 last_insn = get_last_insn ();
3550
3551 if (REG_P (x))
3552 set_unique_reg_note (last_insn, REG_EQUAL, y);
3553
3554 return last_insn;
3555 }
3556
3557 return NULL_RTX;
3558 }
3559 \f
3560 /* Pushing data onto the stack. */
3561
3562 /* Push a block of length SIZE (perhaps variable)
3563 and return an rtx to address the beginning of the block.
3564 The value may be virtual_outgoing_args_rtx.
3565
3566 EXTRA is the number of bytes of padding to push in addition to SIZE.
3567 BELOW nonzero means this padding comes at low addresses;
3568 otherwise, the padding comes at high addresses. */
3569
3570 rtx
3571 push_block (rtx size, int extra, int below)
3572 {
3573 rtx temp;
3574
3575 size = convert_modes (Pmode, ptr_mode, size, 1);
3576 if (CONSTANT_P (size))
3577 anti_adjust_stack (plus_constant (size, extra));
3578 else if (REG_P (size) && extra == 0)
3579 anti_adjust_stack (size);
3580 else
3581 {
3582 temp = copy_to_mode_reg (Pmode, size);
3583 if (extra != 0)
3584 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3585 temp, 0, OPTAB_LIB_WIDEN);
3586 anti_adjust_stack (temp);
3587 }
3588
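/* Compute the address of the start of the block just allocated. With a
   downward-growing stack this is virtual_outgoing_args_rtx itself
   (stepping over any low-address padding); with an upward-growing stack
   we have to back up from virtual_outgoing_args_rtx by SIZE plus any
   high-address padding. */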
3589 #ifndef STACK_GROWS_DOWNWARD
3590 if (0)
3591 #else
3592 if (1)
3593 #endif
3594 {
3595 temp = virtual_outgoing_args_rtx;
3596 if (extra != 0 && below)
3597 temp = plus_constant (temp, extra);
3598 }
3599 else
3600 {
3601 if (CONST_INT_P (size))
3602 temp = plus_constant (virtual_outgoing_args_rtx,
3603 -INTVAL (size) - (below ? 0 : extra));
3604 else if (extra != 0 && !below)
3605 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3606 negate_rtx (Pmode, plus_constant (size, extra)));
3607 else
3608 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3609 negate_rtx (Pmode, size));
3610 }
3611
3612 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3613 }
3614
3615 #ifdef PUSH_ROUNDING
3616
3617 /* Emit a single push insn for the value X, which has mode MODE and type TYPE. */
3618
3619 static void
3620 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3621 {
3622 rtx dest_addr;
3623 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3624 rtx dest;
3625 enum insn_code icode;
3626 insn_operand_predicate_fn pred;
3627
3628 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3629 /* If there is a push pattern, use it. Otherwise try the old way of
3630 throwing a MEM representing the push operation to the move expander. */
3631 icode = optab_handler (push_optab, mode)->insn_code;
3632 if (icode != CODE_FOR_nothing)
3633 {
3634 if (((pred = insn_data[(int) icode].operand[0].predicate)
3635 && !((*pred) (x, mode))))
3636 x = force_reg (mode, x);
3637 emit_insn (GEN_FCN (icode) (x));
3638 return;
3639 }
3640 if (GET_MODE_SIZE (mode) == rounded_size)
3641 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3642 /* If we are to pad downward, adjust the stack pointer first and
3643 then store X into the stack location using an offset. This is
3644 because emit_move_insn does not know how to pad; it does not have
3645 access to TYPE. */
3646 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3647 {
3648 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3649 HOST_WIDE_INT offset;
3650
3651 emit_move_insn (stack_pointer_rtx,
3652 expand_binop (Pmode,
3653 #ifdef STACK_GROWS_DOWNWARD
3654 sub_optab,
3655 #else
3656 add_optab,
3657 #endif
3658 stack_pointer_rtx,
3659 GEN_INT (rounded_size),
3660 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3661
3662 offset = (HOST_WIDE_INT) padding_size;
3663 #ifdef STACK_GROWS_DOWNWARD
3664 if (STACK_PUSH_CODE == POST_DEC)
3665 /* We have already decremented the stack pointer, so get the
3666 previous value. */
3667 offset += (HOST_WIDE_INT) rounded_size;
3668 #else
3669 if (STACK_PUSH_CODE == POST_INC)
3670 /* We have already incremented the stack pointer, so get the
3671 previous value. */
3672 offset -= (HOST_WIDE_INT) rounded_size;
3673 #endif
3674 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3675 }
3676 else
3677 {
3678 #ifdef STACK_GROWS_DOWNWARD
3679 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3680 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3681 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3682 #else
3683 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3684 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3685 GEN_INT (rounded_size));
3686 #endif
3687 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3688 }
3689
3690 dest = gen_rtx_MEM (mode, dest_addr);
3691
3692 if (type != 0)
3693 {
3694 set_mem_attributes (dest, type, 1);
3695
3696 if (flag_optimize_sibling_calls)
3697 /* Function incoming arguments may overlap with sibling call
3698 outgoing arguments and we cannot allow reordering of reads
3699 from function arguments with stores to outgoing arguments
3700 of sibling calls. */
3701 set_mem_alias_set (dest, 0);
3702 }
3703 emit_move_insn (dest, x);
3704 }
3705 #endif
3706
3707 /* Generate code to push X onto the stack, assuming it has mode MODE and
3708 type TYPE.
3709 MODE is redundant except when X is a CONST_INT (since they don't
3710 carry mode info).
3711 SIZE is an rtx for the size of data to be copied (in bytes),
3712 needed only if X is BLKmode.
3713
3714 ALIGN (in bits) is maximum alignment we can assume.
3715
3716 If PARTIAL and REG are both nonzero, then copy that many of the first
3717 bytes of X into registers starting with REG, and push the rest of X.
3718 The amount of space pushed is decreased by PARTIAL bytes.
3719 REG must be a hard register in this case.
3720 If REG is zero but PARTIAL is not, take all other actions for an
3721 argument partially in registers, but do not actually load any
3722 registers.
3723
3724 EXTRA is the amount in bytes of extra space to leave next to this arg.
3725 This is ignored if an argument block has already been allocated.
3726
3727 On a machine that lacks real push insns, ARGS_ADDR is the address of
3728 the bottom of the argument block for this call. We use indexing off there
3729 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3730 argument block has not been preallocated.
3731
3732 ARGS_SO_FAR is the size of args previously pushed for this call.
3733
3734 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3735 for arguments passed in registers. If nonzero, it will be the number
3736 of bytes required. */
3737
3738 void
3739 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3740 unsigned int align, int partial, rtx reg, int extra,
3741 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3742 rtx alignment_pad)
3743 {
3744 rtx xinner;
3745 enum direction stack_direction
3746 #ifdef STACK_GROWS_DOWNWARD
3747 = downward;
3748 #else
3749 = upward;
3750 #endif
3751
3752 /* Decide where to pad the argument: `downward' for below,
3753 `upward' for above, or `none' for don't pad it.
3754 Default is below for small data on big-endian machines; else above. */
3755 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3756
3757 /* Invert direction if stack is post-decrement.
3758 FIXME: why? */
3759 if (STACK_PUSH_CODE == POST_DEC)
3760 if (where_pad != none)
3761 where_pad = (where_pad == downward ? upward : downward);
3762
3763 xinner = x;
3764
3765 if (mode == BLKmode
3766 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3767 {
3768 /* Copy a block into the stack, entirely or partially. */
3769
3770 rtx temp;
3771 int used;
3772 int offset;
3773 int skip;
3774
3775 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3776 used = partial - offset;
3777
3778 if (mode != BLKmode)
3779 {
3780 /* A value is to be stored in an insufficiently aligned
3781 stack slot; copy via a suitably aligned slot if
3782 necessary. */
3783 size = GEN_INT (GET_MODE_SIZE (mode));
3784 if (!MEM_P (xinner))
3785 {
3786 temp = assign_temp (type, 0, 1, 1);
3787 emit_move_insn (temp, xinner);
3788 xinner = temp;
3789 }
3790 }
3791
3792 gcc_assert (size);
3793
3794 /* USED is now the # of bytes we need not copy to the stack
3795 because registers will take care of them. */
3796
3797 if (partial != 0)
3798 xinner = adjust_address (xinner, BLKmode, used);
3799
3800 /* If the partial register-part of the arg counts in its stack size,
3801 skip the part of stack space corresponding to the registers.
3802 Otherwise, start copying to the beginning of the stack space,
3803 by setting SKIP to 0. */
3804 skip = (reg_parm_stack_space == 0) ? 0 : used;
3805
3806 #ifdef PUSH_ROUNDING
3807 /* Do it with several push insns if that doesn't take lots of insns
3808 and if there is no difficulty with push insns that skip bytes
3809 on the stack for alignment purposes. */
3810 if (args_addr == 0
3811 && PUSH_ARGS
3812 && CONST_INT_P (size)
3813 && skip == 0
3814 && MEM_ALIGN (xinner) >= align
3815 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3816 /* Here we avoid the case of a structure whose weak alignment
3817 forces many pushes of a small amount of data,
3818 and such small pushes do rounding that causes trouble. */
3819 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3820 || align >= BIGGEST_ALIGNMENT
3821 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3822 == (align / BITS_PER_UNIT)))
3823 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3824 {
3825 /* Push padding now if padding above and stack grows down,
3826 or if padding below and stack grows up.
3827 But if space already allocated, this has already been done. */
3828 if (extra && args_addr == 0
3829 && where_pad != none && where_pad != stack_direction)
3830 anti_adjust_stack (GEN_INT (extra));
3831
3832 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3833 }
3834 else
3835 #endif /* PUSH_ROUNDING */
3836 {
3837 rtx target;
3838
3839 /* Otherwise make space on the stack and copy the data
3840 to the address of that space. */
3841
3842 /* Deduct words put into registers from the size we must copy. */
3843 if (partial != 0)
3844 {
3845 if (CONST_INT_P (size))
3846 size = GEN_INT (INTVAL (size) - used);
3847 else
3848 size = expand_binop (GET_MODE (size), sub_optab, size,
3849 GEN_INT (used), NULL_RTX, 0,
3850 OPTAB_LIB_WIDEN);
3851 }
3852
3853 /* Get the address of the stack space.
3854 In this case, we do not deal with EXTRA separately.
3855 A single stack adjust will do. */
3856 if (! args_addr)
3857 {
3858 temp = push_block (size, extra, where_pad == downward);
3859 extra = 0;
3860 }
3861 else if (CONST_INT_P (args_so_far))
3862 temp = memory_address (BLKmode,
3863 plus_constant (args_addr,
3864 skip + INTVAL (args_so_far)));
3865 else
3866 temp = memory_address (BLKmode,
3867 plus_constant (gen_rtx_PLUS (Pmode,
3868 args_addr,
3869 args_so_far),
3870 skip));
3871
3872 if (!ACCUMULATE_OUTGOING_ARGS)
3873 {
3874 /* If the source is referenced relative to the stack pointer,
3875 copy it to another register to stabilize it. We do not need
3876 to do this if we know that we won't be changing sp. */
3877
3878 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3879 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3880 temp = copy_to_reg (temp);
3881 }
3882
3883 target = gen_rtx_MEM (BLKmode, temp);
3884
3885 /* We do *not* set_mem_attributes here, because incoming arguments
3886 may overlap with sibling call outgoing arguments and we cannot
3887 allow reordering of reads from function arguments with stores
3888 to outgoing arguments of sibling calls. We do, however, want
3889 to record the alignment of the stack slot. */
3890 /* ALIGN may well be better aligned than TYPE, e.g. due to
3891 PARM_BOUNDARY. Assume the caller isn't lying. */
3892 set_mem_align (target, align);
3893
3894 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3895 }
3896 }
3897 else if (partial > 0)
3898 {
3899 /* Scalar partly in registers. */
3900
3901 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3902 int i;
3903 int not_stack;
3904 /* # bytes of start of argument
3905 that we must make space for but need not store. */
3906 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3907 int args_offset = INTVAL (args_so_far);
3908 int skip;
3909
3910 /* Push padding now if padding above and stack grows down,
3911 or if padding below and stack grows up.
3912 But if space already allocated, this has already been done. */
3913 if (extra && args_addr == 0
3914 && where_pad != none && where_pad != stack_direction)
3915 anti_adjust_stack (GEN_INT (extra));
3916
3917 /* If we make space by pushing it, we might as well push
3918 the real data. Otherwise, we can leave OFFSET nonzero
3919 and leave the space uninitialized. */
3920 if (args_addr == 0)
3921 offset = 0;
3922
3923 /* Now NOT_STACK gets the number of words that we don't need to
3924 allocate on the stack. Convert OFFSET to words too. */
3925 not_stack = (partial - offset) / UNITS_PER_WORD;
3926 offset /= UNITS_PER_WORD;
3927
3928 /* If the partial register-part of the arg counts in its stack size,
3929 skip the part of stack space corresponding to the registers.
3930 Otherwise, start copying to the beginning of the stack space,
3931 by setting SKIP to 0. */
3932 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3933
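/* If X is a constant the target cannot accept directly, put it in the
   constant pool and refer to it through a validized MEM. */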
3934 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3935 x = validize_mem (force_const_mem (mode, x));
3936
3937 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3938 SUBREGs of such registers are not allowed. */
3939 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3940 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3941 x = copy_to_reg (x);
3942
3943 /* Loop over all the words allocated on the stack for this arg. */
3944 /* We can do it by words, because any scalar bigger than a word
3945 has a size that is a multiple of a word. */
3946 #ifndef PUSH_ARGS_REVERSED
3947 for (i = not_stack; i < size; i++)
3948 #else
3949 for (i = size - 1; i >= not_stack; i--)
3950 #endif
3951 if (i >= not_stack + offset)
3952 emit_push_insn (operand_subword_force (x, i, mode),
3953 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3954 0, args_addr,
3955 GEN_INT (args_offset + ((i - not_stack + skip)
3956 * UNITS_PER_WORD)),
3957 reg_parm_stack_space, alignment_pad);
3958 }
3959 else
3960 {
3961 rtx addr;
3962 rtx dest;
3963
3964 /* Push padding now if padding above and stack grows down,
3965 or if padding below and stack grows up.
3966 But if space already allocated, this has already been done. */
3967 if (extra && args_addr == 0
3968 && where_pad != none && where_pad != stack_direction)
3969 anti_adjust_stack (GEN_INT (extra));
3970
3971 #ifdef PUSH_ROUNDING
3972 if (args_addr == 0 && PUSH_ARGS)
3973 emit_single_push_insn (mode, x, type);
3974 else
3975 #endif
3976 {
3977 if (CONST_INT_P (args_so_far))
3978 addr
3979 = memory_address (mode,
3980 plus_constant (args_addr,
3981 INTVAL (args_so_far)));
3982 else
3983 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3984 args_so_far));
3985 dest = gen_rtx_MEM (mode, addr);
3986
3987 /* We do *not* set_mem_attributes here, because incoming arguments
3988 may overlap with sibling call outgoing arguments and we cannot
3989 allow reordering of reads from function arguments with stores
3990 to outgoing arguments of sibling calls. We do, however, want
3991 to record the alignment of the stack slot. */
3992 /* ALIGN may well be better aligned than TYPE, e.g. due to
3993 PARM_BOUNDARY. Assume the caller isn't lying. */
3994 set_mem_align (dest, align);
3995
3996 emit_move_insn (dest, x);
3997 }
3998 }
3999
4000 /* If part should go in registers, copy that part
4001 into the appropriate registers. Do this now, at the end,
4002 since mem-to-mem copies above may do function calls. */
4003 if (partial > 0 && reg != 0)
4004 {
4005 /* Handle calls that pass values in multiple non-contiguous locations.
4006 The Irix 6 ABI has examples of this. */
4007 if (GET_CODE (reg) == PARALLEL)
4008 emit_group_load (reg, x, type, -1);
4009 else
4010 {
4011 gcc_assert (partial % UNITS_PER_WORD == 0);
4012 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4013 }
4014 }
4015
4016 if (extra && args_addr == 0 && where_pad == stack_direction)
4017 anti_adjust_stack (GEN_INT (extra));
4018
4019 if (alignment_pad && args_addr == 0)
4020 anti_adjust_stack (alignment_pad);
4021 }
4022 \f
4023 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4024 operations. */
4025
4026 static rtx
4027 get_subtarget (rtx x)
4028 {
4029 return (optimize
4030 || x == 0
4031 /* Only registers can be subtargets. */
4032 || !REG_P (x)
4033 /* Don't use hard regs to avoid extending their life. */
4034 || REGNO (x) < FIRST_PSEUDO_REGISTER
4035 ? 0 : x);
4036 }
4037
4038 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4039 FIELD is a bitfield. Returns true if the optimization was successful,
4040 and there's nothing else to do. */
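/* For example (illustrative): for "s.f += 1" where F is a bitfield occupying
   the most significant bits of its word, the addition can be done directly on
   the containing word; carries out of the field simply fall off the top, so
   no extract/insert pair is needed. */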
4041
4042 static bool
4043 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4044 unsigned HOST_WIDE_INT bitpos,
4045 enum machine_mode mode1, rtx str_rtx,
4046 tree to, tree src)
4047 {
4048 enum machine_mode str_mode = GET_MODE (str_rtx);
4049 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4050 tree op0, op1;
4051 rtx value, result;
4052 optab binop;
4053
4054 if (mode1 != VOIDmode
4055 || bitsize >= BITS_PER_WORD
4056 || str_bitsize > BITS_PER_WORD
4057 || TREE_SIDE_EFFECTS (to)
4058 || TREE_THIS_VOLATILE (to))
4059 return false;
4060
4061 STRIP_NOPS (src);
4062 if (!BINARY_CLASS_P (src)
4063 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4064 return false;
4065
4066 op0 = TREE_OPERAND (src, 0);
4067 op1 = TREE_OPERAND (src, 1);
4068 STRIP_NOPS (op0);
4069
4070 if (!operand_equal_p (to, op0, 0))
4071 return false;
4072
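/* Narrow STR_RTX to the unit of memory that get_best_mode selects for the
   field, and make BITPOS relative to that unit. */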
4073 if (MEM_P (str_rtx))
4074 {
4075 unsigned HOST_WIDE_INT offset1;
4076
4077 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4078 str_mode = word_mode;
4079 str_mode = get_best_mode (bitsize, bitpos,
4080 MEM_ALIGN (str_rtx), str_mode, 0);
4081 if (str_mode == VOIDmode)
4082 return false;
4083 str_bitsize = GET_MODE_BITSIZE (str_mode);
4084
4085 offset1 = bitpos;
4086 bitpos %= str_bitsize;
4087 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4088 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4089 }
4090 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4091 return false;
4092
4093 /* If the bit field covers the whole REG/MEM, store_field
4094 will likely generate better code. */
4095 if (bitsize >= str_bitsize)
4096 return false;
4097
4098 /* We can't handle fields split across multiple entities. */
4099 if (bitpos + bitsize > str_bitsize)
4100 return false;
4101
4102 if (BYTES_BIG_ENDIAN)
4103 bitpos = str_bitsize - bitpos - bitsize;
4104
4105 switch (TREE_CODE (src))
4106 {
4107 case PLUS_EXPR:
4108 case MINUS_EXPR:
4109 /* For now, just optimize the case of the topmost bitfield
4110 where we don't need to do any masking and also
4111 1-bit bitfields where xor can be used.
4112 We might win by one instruction for the other bitfields
4113 too if insv/extv instructions aren't used, so that
4114 can be added later. */
4115 if (bitpos + bitsize != str_bitsize
4116 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4117 break;
4118
4119 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4120 value = convert_modes (str_mode,
4121 TYPE_MODE (TREE_TYPE (op1)), value,
4122 TYPE_UNSIGNED (TREE_TYPE (op1)));
4123
4124 /* We may be accessing data outside the field, which means
4125 we can alias adjacent data. */
4126 if (MEM_P (str_rtx))
4127 {
4128 str_rtx = shallow_copy_rtx (str_rtx);
4129 set_mem_alias_set (str_rtx, 0);
4130 set_mem_expr (str_rtx, 0);
4131 }
4132
4133 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4134 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4135 {
4136 value = expand_and (str_mode, value, const1_rtx, NULL);
4137 binop = xor_optab;
4138 }
4139 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4140 build_int_cst (NULL_TREE, bitpos),
4141 NULL_RTX, 1);
4142 result = expand_binop (str_mode, binop, str_rtx,
4143 value, str_rtx, 1, OPTAB_WIDEN);
4144 if (result != str_rtx)
4145 emit_move_insn (str_rtx, result);
4146 return true;
4147
4148 case BIT_IOR_EXPR:
4149 case BIT_XOR_EXPR:
4150 if (TREE_CODE (op1) != INTEGER_CST)
4151 break;
4152 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4153 value = convert_modes (GET_MODE (str_rtx),
4154 TYPE_MODE (TREE_TYPE (op1)), value,
4155 TYPE_UNSIGNED (TREE_TYPE (op1)));
4156
4157 /* We may be accessing data outside the field, which means
4158 we can alias adjacent data. */
4159 if (MEM_P (str_rtx))
4160 {
4161 str_rtx = shallow_copy_rtx (str_rtx);
4162 set_mem_alias_set (str_rtx, 0);
4163 set_mem_expr (str_rtx, 0);
4164 }
4165
4166 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4167 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4168 {
4169 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4170 - 1);
4171 value = expand_and (GET_MODE (str_rtx), value, mask,
4172 NULL_RTX);
4173 }
4174 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4175 build_int_cst (NULL_TREE, bitpos),
4176 NULL_RTX, 1);
4177 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4178 value, str_rtx, 1, OPTAB_WIDEN);
4179 if (result != str_rtx)
4180 emit_move_insn (str_rtx, result);
4181 return true;
4182
4183 default:
4184 break;
4185 }
4186
4187 return false;
4188 }
4189
4190
4191 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4192 is true, try generating a nontemporal store. */
4193
4194 void
4195 expand_assignment (tree to, tree from, bool nontemporal)
4196 {
4197 rtx to_rtx = 0;
4198 rtx result;
4199
4200 /* Don't crash if the lhs of the assignment was erroneous. */
4201 if (TREE_CODE (to) == ERROR_MARK)
4202 {
4203 result = expand_normal (from);
4204 return;
4205 }
4206
4207 /* Optimize away no-op moves without side-effects. */
4208 if (operand_equal_p (to, from, 0))
4209 return;
4210
4211 /* Assignment of a structure component needs special treatment
4212 if the structure component's rtx is not simply a MEM.
4213 Assignment of an array element at a constant index, and assignment of
4214 an array element in an unaligned packed structure field, has the same
4215 problem. */
4216 if (handled_component_p (to)
4217 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4218 {
4219 enum machine_mode mode1;
4220 HOST_WIDE_INT bitsize, bitpos;
4221 tree offset;
4222 int unsignedp;
4223 int volatilep = 0;
4224 tree tem;
4225
4226 push_temp_slots ();
4227 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4228 &unsignedp, &volatilep, true);
4229
4230 /* If we are going to use store_bit_field and extract_bit_field,
4231 make sure to_rtx will be safe for multiple use. */
4232
4233 to_rtx = expand_normal (tem);
4234
4235 if (offset != 0)
4236 {
4237 enum machine_mode address_mode;
4238 rtx offset_rtx;
4239
4240 if (!MEM_P (to_rtx))
4241 {
4242 /* We can get constant negative offsets into arrays with broken
4243 user code. Translate this to a trap instead of ICEing. */
4244 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4245 expand_builtin_trap ();
4246 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4247 }
4248
4249 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4250 address_mode
4251 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4252 if (GET_MODE (offset_rtx) != address_mode)
4253 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4254
4255 /* A constant address in TO_RTX can have VOIDmode; we must not try
4256 to call force_reg in that case, so avoid it. */
4257 if (MEM_P (to_rtx)
4258 && GET_MODE (to_rtx) == BLKmode
4259 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4260 && bitsize > 0
4261 && (bitpos % bitsize) == 0
4262 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4263 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4264 {
4265 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4266 bitpos = 0;
4267 }
4268
4269 to_rtx = offset_address (to_rtx, offset_rtx,
4270 highest_pow2_factor_for_target (to,
4271 offset));
4272 }
4273
4274 /* Handle expand_expr of a complex value returning a CONCAT. */
4275 if (GET_CODE (to_rtx) == CONCAT)
4276 {
4277 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4278 {
4279 gcc_assert (bitpos == 0);
4280 result = store_expr (from, to_rtx, false, nontemporal);
4281 }
4282 else
4283 {
4284 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
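/* Store to the real part (XEXP 0) when BITPOS is zero, otherwise
   to the imaginary part (XEXP 1). */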
4285 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4286 nontemporal);
4287 }
4288 }
4289 else
4290 {
4291 if (MEM_P (to_rtx))
4292 {
4293 /* If the field is at offset zero, we could have been given the
4294 DECL_RTX of the parent struct. Don't munge it. */
4295 to_rtx = shallow_copy_rtx (to_rtx);
4296
4297 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4298
4299 /* Deal with volatile and readonly fields. The former is only
4300 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4301 if (volatilep)
4302 MEM_VOLATILE_P (to_rtx) = 1;
4303 if (component_uses_parent_alias_set (to))
4304 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4305 }
4306
4307 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4308 to_rtx, to, from))
4309 result = NULL;
4310 else
4311 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4312 TREE_TYPE (tem), get_alias_set (to),
4313 nontemporal);
4314 }
4315
4316 if (result)
4317 preserve_temp_slots (result);
4318 free_temp_slots ();
4319 pop_temp_slots ();
4320 return;
4321 }
4322
4323 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4324 {
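/* Store through a misaligned indirect reference using the target's
   movmisalign<mode> pattern; the assert below checks that the pattern
   exists. */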
4325 addr_space_t as = ADDR_SPACE_GENERIC;
4326 enum machine_mode mode, op_mode1;
4327 enum insn_code icode;
4328 rtx reg, addr, mem, insn;
4329
4330 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4331 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4332
4333 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4334 reg = force_not_mem (reg);
4335
4336 mode = TYPE_MODE (TREE_TYPE (to));
4337 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4338 EXPAND_SUM);
4339 addr = memory_address_addr_space (mode, addr, as);
4340 mem = gen_rtx_MEM (mode, addr);
4341
4342 set_mem_attributes (mem, to, 0);
4343 set_mem_addr_space (mem, as);
4344
4345 icode = movmisalign_optab->handlers[mode].insn_code;
4346 gcc_assert (icode != CODE_FOR_nothing);
4347
4348 op_mode1 = insn_data[icode].operand[1].mode;
4349 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4350 && op_mode1 != VOIDmode)
4351 reg = copy_to_mode_reg (op_mode1, reg);
4352
4353 insn = GEN_FCN (icode) (mem, reg);
4354 emit_insn (insn);
4355 return;
4356 }
4357
4358 /* If the rhs is a function call and its value is not an aggregate,
4359 call the function before we start to compute the lhs.
4360 This is needed for correct code for cases such as
4361 val = setjmp (buf) on machines where reference to val
4362 requires loading up part of an address in a separate insn.
4363
4364 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4365 since it might be a promoted variable where the zero- or sign-extension
4366 needs to be done. Handling this in the normal way is safe because no
4367 computation is done before the call. The same is true for SSA names. */
4368 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4369 && COMPLETE_TYPE_P (TREE_TYPE (from))
4370 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4371 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4372 && REG_P (DECL_RTL (to)))
4373 || TREE_CODE (to) == SSA_NAME))
4374 {
4375 rtx value;
4376
4377 push_temp_slots ();
4378 value = expand_normal (from);
4379 if (to_rtx == 0)
4380 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4381
4382 /* Handle calls that return values in multiple non-contiguous locations.
4383 The Irix 6 ABI has examples of this. */
4384 if (GET_CODE (to_rtx) == PARALLEL)
4385 emit_group_load (to_rtx, value, TREE_TYPE (from),
4386 int_size_in_bytes (TREE_TYPE (from)));
4387 else if (GET_MODE (to_rtx) == BLKmode)
4388 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4389 else
4390 {
4391 if (POINTER_TYPE_P (TREE_TYPE (to)))
4392 value = convert_memory_address_addr_space
4393 (GET_MODE (to_rtx), value,
4394 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4395
4396 emit_move_insn (to_rtx, value);
4397 }
4398 preserve_temp_slots (to_rtx);
4399 free_temp_slots ();
4400 pop_temp_slots ();
4401 return;
4402 }
4403
4404 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4405 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4406
4407 if (to_rtx == 0)
4408 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4409
4410 /* Don't move directly into a return register. */
4411 if (TREE_CODE (to) == RESULT_DECL
4412 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4413 {
4414 rtx temp;
4415
4416 push_temp_slots ();
4417 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4418
4419 if (GET_CODE (to_rtx) == PARALLEL)
4420 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4421 int_size_in_bytes (TREE_TYPE (from)));
4422 else
4423 emit_move_insn (to_rtx, temp);
4424
4425 preserve_temp_slots (to_rtx);
4426 free_temp_slots ();
4427 pop_temp_slots ();
4428 return;
4429 }
4430
4431 /* In case we are returning the contents of an object which overlaps
4432 the place the value is being stored, use a safe function when copying
4433 a value through a pointer into a structure value return block. */
4434 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4435 && ADDR_SPACE_GENERIC_P
4436 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4437 && cfun->returns_struct
4438 && !cfun->returns_pcc_struct)
4439 {
4440 rtx from_rtx, size;
4441
4442 push_temp_slots ();
4443 size = expr_size (from);
4444 from_rtx = expand_normal (from);
4445
4446 emit_library_call (memmove_libfunc, LCT_NORMAL,
4447 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4448 XEXP (from_rtx, 0), Pmode,
4449 convert_to_mode (TYPE_MODE (sizetype),
4450 size, TYPE_UNSIGNED (sizetype)),
4451 TYPE_MODE (sizetype));
4452
4453 preserve_temp_slots (to_rtx);
4454 free_temp_slots ();
4455 pop_temp_slots ();
4456 return;
4457 }
4458
4459 /* Compute FROM and store the value in the rtx we got. */
4460
4461 push_temp_slots ();
4462 result = store_expr (from, to_rtx, 0, nontemporal);
4463 preserve_temp_slots (result);
4464 free_temp_slots ();
4465 pop_temp_slots ();
4466 return;
4467 }
4468
4469 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4470 succeeded, false otherwise. */
4471
4472 bool
4473 emit_storent_insn (rtx to, rtx from)
4474 {
4475 enum machine_mode mode = GET_MODE (to), imode;
4476 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4477 rtx pattern;
4478
4479 if (code == CODE_FOR_nothing)
4480 return false;
4481
4482 imode = insn_data[code].operand[0].mode;
4483 if (!insn_data[code].operand[0].predicate (to, imode))
4484 return false;
4485
4486 imode = insn_data[code].operand[1].mode;
4487 if (!insn_data[code].operand[1].predicate (from, imode))
4488 {
4489 from = copy_to_mode_reg (imode, from);
4490 if (!insn_data[code].operand[1].predicate (from, imode))
4491 return false;
4492 }
4493
4494 pattern = GEN_FCN (code) (to, from);
4495 if (pattern == NULL_RTX)
4496 return false;
4497
4498 emit_insn (pattern);
4499 return true;
4500 }
4501
4502 /* Generate code for computing expression EXP,
4503 and storing the value into TARGET.
4504
4505 If the mode is BLKmode then we may return TARGET itself.
4506 It turns out that in BLKmode it doesn't cause a problem,
4507 because C has no operators that could combine two different
4508 assignments into the same BLKmode object with different values
4509 with no sequence point. Will other languages need this to
4510 be more thorough?
4511
4512 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4513 stack, and block moves may need to be treated specially.
4514
4515 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4516
4517 rtx
4518 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4519 {
4520 rtx temp;
4521 rtx alt_rtl = NULL_RTX;
4522 location_t loc = EXPR_LOCATION (exp);
4523
4524 if (VOID_TYPE_P (TREE_TYPE (exp)))
4525 {
4526 /* C++ can generate ?: expressions with a throw expression in one
4527 branch and an rvalue in the other. Here, we resolve attempts to
4528 store the throw expression's nonexistent result. */
4529 gcc_assert (!call_param_p);
4530 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4531 return NULL_RTX;
4532 }
4533 if (TREE_CODE (exp) == COMPOUND_EXPR)
4534 {
4535 /* Perform first part of compound expression, then assign from second
4536 part. */
4537 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4538 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4539 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4540 nontemporal);
4541 }
4542 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4543 {
4544 /* For conditional expression, get safe form of the target. Then
4545 test the condition, doing the appropriate assignment on either
4546 side. This avoids the creation of unnecessary temporaries.
4547 For non-BLKmode, it is more efficient not to do this. */
4548
4549 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4550
4551 do_pending_stack_adjust ();
4552 NO_DEFER_POP;
4553 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4554 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4555 nontemporal);
4556 emit_jump_insn (gen_jump (lab2));
4557 emit_barrier ();
4558 emit_label (lab1);
4559 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4560 nontemporal);
4561 emit_label (lab2);
4562 OK_DEFER_POP;
4563
4564 return NULL_RTX;
4565 }
4566 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4567 /* If this is a scalar in a register that is stored in a wider mode
4568 than the declared mode, compute the result into its declared mode
4569 and then convert to the wider mode. Our value is the computed
4570 expression. */
4571 {
4572 rtx inner_target = 0;
4573
4574 /* We can do the conversion inside EXP, which will often result
4575 in some optimizations. Do the conversion in two steps: first
4576 change the signedness, if needed, then do the extension. But don't
4577 do this if the type of EXP is a subtype of something else
4578 since then the conversion might involve more than just
4579 converting modes. */
4580 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4581 && TREE_TYPE (TREE_TYPE (exp)) == 0
4582 && GET_MODE_PRECISION (GET_MODE (target))
4583 == TYPE_PRECISION (TREE_TYPE (exp)))
4584 {
4585 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4586 != SUBREG_PROMOTED_UNSIGNED_P (target))
4587 {
4588 /* Some types, e.g. Fortran's logical*4, won't have a signed
4589 version, so use the mode instead. */
4590 tree ntype
4591 = (signed_or_unsigned_type_for
4592 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4593 if (ntype == NULL)
4594 ntype = lang_hooks.types.type_for_mode
4595 (TYPE_MODE (TREE_TYPE (exp)),
4596 SUBREG_PROMOTED_UNSIGNED_P (target));
4597
4598 exp = fold_convert_loc (loc, ntype, exp);
4599 }
4600
4601 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4602 (GET_MODE (SUBREG_REG (target)),
4603 SUBREG_PROMOTED_UNSIGNED_P (target)),
4604 exp);
4605
4606 inner_target = SUBREG_REG (target);
4607 }
4608
4609 temp = expand_expr (exp, inner_target, VOIDmode,
4610 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4611
4612 /* If TEMP is a VOIDmode constant, use convert_modes to make
4613 sure that we properly convert it. */
4614 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4615 {
4616 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4617 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4618 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4619 GET_MODE (target), temp,
4620 SUBREG_PROMOTED_UNSIGNED_P (target));
4621 }
4622
4623 convert_move (SUBREG_REG (target), temp,
4624 SUBREG_PROMOTED_UNSIGNED_P (target));
4625
4626 return NULL_RTX;
4627 }
4628 else if (TREE_CODE (exp) == STRING_CST
4629 && !nontemporal && !call_param_p
4630 && TREE_STRING_LENGTH (exp) > 0
4631 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4632 {
4633 /* Optimize initialization of an array with a STRING_CST. */
4634 HOST_WIDE_INT exp_len, str_copy_len;
4635 rtx dest_mem;
4636
4637 exp_len = int_expr_size (exp);
4638 if (exp_len <= 0)
4639 goto normal_expr;
4640
4641 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4642 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4643 goto normal_expr;
4644
4645 str_copy_len = TREE_STRING_LENGTH (exp);
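/* If STORE_MAX_PIECES is a power of two, round the copy length up to a
   multiple of it, presumably so the copy can proceed in full-sized
   pieces. */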
4646 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4647 {
4648 str_copy_len += STORE_MAX_PIECES - 1;
4649 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4650 }
4651 str_copy_len = MIN (str_copy_len, exp_len);
4652 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4653 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4654 MEM_ALIGN (target), false))
4655 goto normal_expr;
4656
4657 dest_mem = target;
4658
4659 dest_mem = store_by_pieces (dest_mem,
4660 str_copy_len, builtin_strncpy_read_str,
4661 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4662 MEM_ALIGN (target), false,
4663 exp_len > str_copy_len ? 1 : 0);
4664 if (exp_len > str_copy_len)
4665 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4666 GEN_INT (exp_len - str_copy_len),
4667 BLOCK_OP_NORMAL);
4668 return NULL_RTX;
4669 }
4670 else
4671 {
4672 rtx tmp_target;
4673
4674 normal_expr:
4675 /* If we want to use a nontemporal store, force the value to
4676 register first. */
4677 tmp_target = nontemporal ? NULL_RTX : target;
4678 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4679 (call_param_p
4680 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4681 &alt_rtl);
4682 }
4683
4684 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4685 the same as that of TARGET, adjust the constant. This is needed, for
4686 example, in case it is a CONST_DOUBLE and we want only a word-sized
4687 value. */
4688 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4689 && TREE_CODE (exp) != ERROR_MARK
4690 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4691 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4692 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4693
4694 /* If value was not generated in the target, store it there.
4695 Convert the value to TARGET's type first if necessary and emit the
4696 pending incrementations that have been queued when expanding EXP.
4697 Note that we cannot emit the whole queue blindly because this will
4698 effectively disable the POST_INC optimization later.
4699
4700 If TEMP and TARGET compare equal according to rtx_equal_p, but
4701 one or both of them are volatile memory refs, we have to distinguish
4702 two cases:
4703 - expand_expr has used TARGET. In this case, we must not generate
4704 another copy. This can be detected by TARGET being equal according
4705 to == .
4706 - expand_expr has not used TARGET - that means that the source just
4707 happens to have the same RTX form. Since temp will have been created
4708 by expand_expr, it will compare unequal according to == .
4709 We must generate a copy in this case, to reach the correct number
4710 of volatile memory references. */
4711
4712 if ((! rtx_equal_p (temp, target)
4713 || (temp != target && (side_effects_p (temp)
4714 || side_effects_p (target))))
4715 && TREE_CODE (exp) != ERROR_MARK
4716 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4717 but TARGET is not a valid memory reference, TEMP will differ
4718 from TARGET although it is really the same location. */
4719 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4720 /* If there's nothing to copy, don't bother. Don't call
4721 expr_size unless necessary, because some front-ends' (C++)
4722 expr_size hook must not be given objects that are not
4723 supposed to be bit-copied or bit-initialized. */
4724 && expr_size (exp) != const0_rtx)
4725 {
4726 if (GET_MODE (temp) != GET_MODE (target)
4727 && GET_MODE (temp) != VOIDmode)
4728 {
4729 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4730 if (GET_MODE (target) == BLKmode
4731 || GET_MODE (temp) == BLKmode)
4732 emit_block_move (target, temp, expr_size (exp),
4733 (call_param_p
4734 ? BLOCK_OP_CALL_PARM
4735 : BLOCK_OP_NORMAL));
4736 else
4737 convert_move (target, temp, unsignedp);
4738 }
4739
4740 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4741 {
4742 /* Handle copying a string constant into an array. The string
4743 constant may be shorter than the array. So copy just the string's
4744 actual length, and clear the rest. First get the size of the data
4745 type of the string, which is actually the size of the target. */
4746 rtx size = expr_size (exp);
4747
4748 if (CONST_INT_P (size)
4749 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4750 emit_block_move (target, temp, size,
4751 (call_param_p
4752 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4753 else
4754 {
4755 enum machine_mode pointer_mode
4756 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4757 enum machine_mode address_mode
4758 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4759
4760 /* Compute the size of the data to copy from the string. */
4761 tree copy_size
4762 = size_binop_loc (loc, MIN_EXPR,
4763 make_tree (sizetype, size),
4764 size_int (TREE_STRING_LENGTH (exp)));
4765 rtx copy_size_rtx
4766 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4767 (call_param_p
4768 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4769 rtx label = 0;
4770
4771 /* Copy that much. */
4772 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4773 TYPE_UNSIGNED (sizetype));
4774 emit_block_move (target, temp, copy_size_rtx,
4775 (call_param_p
4776 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4777
4778 /* Figure out how much is left in TARGET that we have to clear.
4779 Do all calculations in pointer_mode. */
4780 if (CONST_INT_P (copy_size_rtx))
4781 {
4782 size = plus_constant (size, -INTVAL (copy_size_rtx));
4783 target = adjust_address (target, BLKmode,
4784 INTVAL (copy_size_rtx));
4785 }
4786 else
4787 {
4788 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4789 copy_size_rtx, NULL_RTX, 0,
4790 OPTAB_LIB_WIDEN);
4791
4792 if (GET_MODE (copy_size_rtx) != address_mode)
4793 copy_size_rtx = convert_to_mode (address_mode,
4794 copy_size_rtx,
4795 TYPE_UNSIGNED (sizetype));
4796
4797 target = offset_address (target, copy_size_rtx,
4798 highest_pow2_factor (copy_size));
4799 label = gen_label_rtx ();
4800 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4801 GET_MODE (size), 0, label);
4802 }
4803
4804 if (size != const0_rtx)
4805 clear_storage (target, size, BLOCK_OP_NORMAL);
4806
4807 if (label)
4808 emit_label (label);
4809 }
4810 }
4811 /* Handle calls that return values in multiple non-contiguous locations.
4812 The Irix 6 ABI has examples of this. */
4813 else if (GET_CODE (target) == PARALLEL)
4814 emit_group_load (target, temp, TREE_TYPE (exp),
4815 int_size_in_bytes (TREE_TYPE (exp)));
4816 else if (GET_MODE (temp) == BLKmode)
4817 emit_block_move (target, temp, expr_size (exp),
4818 (call_param_p
4819 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4820 else if (nontemporal
4821 && emit_storent_insn (target, temp))
4822 /* If we managed to emit a nontemporal store, there is nothing else to
4823 do. */
4824 ;
4825 else
4826 {
4827 temp = force_operand (temp, target);
4828 if (temp != target)
4829 emit_move_insn (target, temp);
4830 }
4831 }
4832
4833 return NULL_RTX;
4834 }
4835 \f
4836 /* Helper for categorize_ctor_elements. Identical interface. */
4837
4838 static bool
4839 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4840 HOST_WIDE_INT *p_elt_count,
4841 bool *p_must_clear)
4842 {
4843 unsigned HOST_WIDE_INT idx;
4844 HOST_WIDE_INT nz_elts, elt_count;
4845 tree value, purpose;
4846
4847 /* Whether CTOR is a valid constant initializer, in accordance with what
4848 initializer_constant_valid_p does. If inferred from the constructor
4849 elements, true until proven otherwise. */
4850 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4851 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4852
4853 nz_elts = 0;
4854 elt_count = 0;
4855
4856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4857 {
4858 HOST_WIDE_INT mult;
4859
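/* A RANGE_EXPR index initializes a run of consecutive elements with the
   same value, so weight the counts by the number of elements in the
   range. */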
4860 mult = 1;
4861 if (TREE_CODE (purpose) == RANGE_EXPR)
4862 {
4863 tree lo_index = TREE_OPERAND (purpose, 0);
4864 tree hi_index = TREE_OPERAND (purpose, 1);
4865
4866 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4867 mult = (tree_low_cst (hi_index, 1)
4868 - tree_low_cst (lo_index, 1) + 1);
4869 }
4870
4871 switch (TREE_CODE (value))
4872 {
4873 case CONSTRUCTOR:
4874 {
4875 HOST_WIDE_INT nz = 0, ic = 0;
4876
4877 bool const_elt_p
4878 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4879
4880 nz_elts += mult * nz;
4881 elt_count += mult * ic;
4882
4883 if (const_from_elts_p && const_p)
4884 const_p = const_elt_p;
4885 }
4886 break;
4887
4888 case INTEGER_CST:
4889 case REAL_CST:
4890 case FIXED_CST:
4891 if (!initializer_zerop (value))
4892 nz_elts += mult;
4893 elt_count += mult;
4894 break;
4895
4896 case STRING_CST:
4897 nz_elts += mult * TREE_STRING_LENGTH (value);
4898 elt_count += mult * TREE_STRING_LENGTH (value);
4899 break;
4900
4901 case COMPLEX_CST:
4902 if (!initializer_zerop (TREE_REALPART (value)))
4903 nz_elts += mult;
4904 if (!initializer_zerop (TREE_IMAGPART (value)))
4905 nz_elts += mult;
4906 elt_count += mult;
4907 break;
4908
4909 case VECTOR_CST:
4910 {
4911 tree v;
4912 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4913 {
4914 if (!initializer_zerop (TREE_VALUE (v)))
4915 nz_elts += mult;
4916 elt_count += mult;
4917 }
4918 }
4919 break;
4920
4921 default:
4922 nz_elts += mult;
4923 elt_count += mult;
4924
4925 if (const_from_elts_p && const_p)
4926 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4927 != NULL_TREE;
4928 break;
4929 }
4930 }
4931
4932 if (!*p_must_clear
4933 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4934 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4935 {
4936 tree init_sub_type;
4937 bool clear_this = true;
4938
4939 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4940 {
4941 /* We don't expect more than one element of the union to be
4942 initialized. Not sure what we should do otherwise... */
4943 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4944 == 1);
4945
4946 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4947 CONSTRUCTOR_ELTS (ctor),
4948 0)->value);
4949
4950 /* ??? We could look at each element of the union, and find the
4951 largest element, which would avoid comparing the size of the
4952 initialized element against any tail padding in the union.
4953 Doesn't seem worth the effort... */
4954 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4955 TYPE_SIZE (init_sub_type)) == 1)
4956 {
4957 /* And now we have to find out if the element itself is fully
4958 constructed. E.g. for union { struct { int a, b; } s; } u
4959 = { .s = { .a = 1 } }. */
4960 if (elt_count == count_type_elements (init_sub_type, false))
4961 clear_this = false;
4962 }
4963 }
4964
4965 *p_must_clear = clear_this;
4966 }
4967
4968 *p_nz_elts += nz_elts;
4969 *p_elt_count += elt_count;
4970
4971 return const_p;
4972 }
4973
4974 /* Examine CTOR to discover:
4975 * how many scalar fields are set to nonzero values,
4976 and place it in *P_NZ_ELTS;
4977 * how many scalar fields in total are in CTOR,
4978 and place it in *P_ELT_COUNT;
4979 * if the type is a union, and the initializer from the constructor
4980 is not the largest element in the union, then set *P_MUST_CLEAR.
4981
4982 Return whether or not CTOR is a valid static constant initializer, the same
4983 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4984
4985 bool
4986 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4987 HOST_WIDE_INT *p_elt_count,
4988 bool *p_must_clear)
4989 {
4990 *p_nz_elts = 0;
4991 *p_elt_count = 0;
4992 *p_must_clear = false;
4993
4994 return
4995 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4996 }
4997
4998 /* Count the number of scalars in TYPE. Return -1 on overflow or
4999 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count
5000 a flexible array member at the end of the structure. */
5001
5002 HOST_WIDE_INT
5003 count_type_elements (const_tree type, bool allow_flexarr)
5004 {
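/* MAX is the largest value a HOST_WIDE_INT can hold; it bounds the
   multiplication in the ARRAY_TYPE case below. */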
5005 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5006 switch (TREE_CODE (type))
5007 {
5008 case ARRAY_TYPE:
5009 {
5010 tree telts = array_type_nelts (type);
5011 if (telts && host_integerp (telts, 1))
5012 {
5013 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5014 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5015 if (n == 0)
5016 return 0;
5017 else if (max / n > m)
5018 return n * m;
5019 }
5020 return -1;
5021 }
5022
5023 case RECORD_TYPE:
5024 {
5025 HOST_WIDE_INT n = 0, t;
5026 tree f;
5027
5028 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5029 if (TREE_CODE (f) == FIELD_DECL)
5030 {
5031 t = count_type_elements (TREE_TYPE (f), false);
5032 if (t < 0)
5033 {
5034 /* Check for structures with flexible array member. */
5035 tree tf = TREE_TYPE (f);
5036 if (allow_flexarr
5037 && TREE_CHAIN (f) == NULL
5038 && TREE_CODE (tf) == ARRAY_TYPE
5039 && TYPE_DOMAIN (tf)
5040 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5041 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5042 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5043 && int_size_in_bytes (type) >= 0)
5044 break;
5045
5046 return -1;
5047 }
5048 n += t;
5049 }
5050
5051 return n;
5052 }
5053
5054 case UNION_TYPE:
5055 case QUAL_UNION_TYPE:
5056 return -1;
5057
5058 case COMPLEX_TYPE:
5059 return 2;
5060
5061 case VECTOR_TYPE:
5062 return TYPE_VECTOR_SUBPARTS (type);
5063
5064 case INTEGER_TYPE:
5065 case REAL_TYPE:
5066 case FIXED_POINT_TYPE:
5067 case ENUMERAL_TYPE:
5068 case BOOLEAN_TYPE:
5069 case POINTER_TYPE:
5070 case OFFSET_TYPE:
5071 case REFERENCE_TYPE:
5072 return 1;
5073
5074 case ERROR_MARK:
5075 return 0;
5076
5077 case VOID_TYPE:
5078 case METHOD_TYPE:
5079 case FUNCTION_TYPE:
5080 case LANG_TYPE:
5081 default:
5082 gcc_unreachable ();
5083 }
5084 }
5085
5086 /* Return 1 if EXP contains mostly (3/4) zeros. */
5087
5088 static int
5089 mostly_zeros_p (const_tree exp)
5090 {
5091 if (TREE_CODE (exp) == CONSTRUCTOR)
5092
5093 {
5094 HOST_WIDE_INT nz_elts, count, elts;
5095 bool must_clear;
5096
5097 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5098 if (must_clear)
5099 return 1;
5100
5101 elts = count_type_elements (TREE_TYPE (exp), false);
5102
5103 return nz_elts < elts / 4;
5104 }
5105
5106 return initializer_zerop (exp);
5107 }
5108
5109 /* Return 1 if EXP contains all zeros. */
5110
5111 static int
5112 all_zeros_p (const_tree exp)
5113 {
5114 if (TREE_CODE (exp) == CONSTRUCTOR)
5115
5116 {
5117 HOST_WIDE_INT nz_elts, count;
5118 bool must_clear;
5119
5120 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5121 return nz_elts == 0;
5122 }
5123
5124 return initializer_zerop (exp);
5125 }
5126 \f
5127 /* Helper function for store_constructor.
5128 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5129 TYPE is the type of the CONSTRUCTOR, not the element type.
5130 CLEARED is as for store_constructor.
5131 ALIAS_SET is the alias set to use for any stores.
5132
5133 This provides a recursive shortcut back to store_constructor when it isn't
5134 necessary to go through store_field. This is so that we can pass through
5135 the cleared field to let store_constructor know that we may not have to
5136 clear a substructure if the outer structure has already been cleared. */
5137
5138 static void
5139 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5140 HOST_WIDE_INT bitpos, enum machine_mode mode,
5141 tree exp, tree type, int cleared,
5142 alias_set_type alias_set)
5143 {
5144 if (TREE_CODE (exp) == CONSTRUCTOR
5145 /* We can only call store_constructor recursively if the size and
5146 bit position are on a byte boundary. */
5147 && bitpos % BITS_PER_UNIT == 0
5148 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5149 /* If we have a nonzero bitpos for a register target, then we just
5150 let store_field do the bitfield handling. This is unlikely to
5151 generate unnecessary clear instructions anyway. */
5152 && (bitpos == 0 || MEM_P (target)))
5153 {
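/* Offset TARGET to the byte containing BITPOS, keeping its mode only when
   that mode is not BLKmode and the new offset preserves its alignment;
   otherwise fall back to BLKmode. */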
5154 if (MEM_P (target))
5155 target
5156 = adjust_address (target,
5157 GET_MODE (target) == BLKmode
5158 || 0 != (bitpos
5159 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5160 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5161
5162
5163 /* Update the alias set, if required. */
5164 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5165 && MEM_ALIAS_SET (target) != 0)
5166 {
5167 target = copy_rtx (target);
5168 set_mem_alias_set (target, alias_set);
5169 }
5170
5171 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5172 }
5173 else
5174 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5175 }
5176
5177 /* Store the value of constructor EXP into the rtx TARGET.
5178 TARGET is either a REG or a MEM; we know it cannot conflict, since
5179 safe_from_p has been called.
5180 CLEARED is true if TARGET is known to have been zero'd.
5181 SIZE is the number of bytes of TARGET we are allowed to modify: this
5182 may not be the same as the size of EXP if we are assigning to a field
5183 which has been packed to exclude padding bits. */
5184
5185 static void
5186 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5187 {
5188 tree type = TREE_TYPE (exp);
5189 #ifdef WORD_REGISTER_OPERATIONS
5190 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5191 #endif
5192
5193 switch (TREE_CODE (type))
5194 {
5195 case RECORD_TYPE:
5196 case UNION_TYPE:
5197 case QUAL_UNION_TYPE:
5198 {
5199 unsigned HOST_WIDE_INT idx;
5200 tree field, value;
5201
5202 /* If size is zero or the target is already cleared, do nothing. */
5203 if (size == 0 || cleared)
5204 cleared = 1;
5205 /* We either clear the aggregate or indicate the value is dead. */
5206 else if ((TREE_CODE (type) == UNION_TYPE
5207 || TREE_CODE (type) == QUAL_UNION_TYPE)
5208 && ! CONSTRUCTOR_ELTS (exp))
5209 /* If the constructor is empty, clear the union. */
5210 {
5211 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5212 cleared = 1;
5213 }
5214
5215 /* If we are building a static constructor into a register,
5216 set the initial value as zero so we can fold the value into
5217 a constant. But if more than one register is involved,
5218 this probably loses. */
5219 else if (REG_P (target) && TREE_STATIC (exp)
5220 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5221 {
5222 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5223 cleared = 1;
5224 }
5225
5226 /* If the constructor has fewer fields than the structure or
5227 if we are initializing the structure to mostly zeros, clear
5228 the whole structure first. Don't do this if TARGET is a
5229 register whose mode size isn't equal to SIZE since
5230 clear_storage can't handle this case. */
5231 else if (size > 0
5232 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5233 != fields_length (type))
5234 || mostly_zeros_p (exp))
5235 && (!REG_P (target)
5236 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5237 == size)))
5238 {
5239 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5240 cleared = 1;
5241 }
5242
5243 if (REG_P (target) && !cleared)
5244 emit_clobber (target);
5245
5246 /* Store each element of the constructor into the
5247 corresponding field of TARGET. */
5248 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5249 {
5250 enum machine_mode mode;
5251 HOST_WIDE_INT bitsize;
5252 HOST_WIDE_INT bitpos = 0;
5253 tree offset;
5254 rtx to_rtx = target;
5255
5256 /* Just ignore missing fields. We cleared the whole
5257 structure, above, if any fields are missing. */
5258 if (field == 0)
5259 continue;
5260
5261 if (cleared && initializer_zerop (value))
5262 continue;
5263
5264 if (host_integerp (DECL_SIZE (field), 1))
5265 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5266 else
5267 bitsize = -1;
5268
5269 mode = DECL_MODE (field);
5270 if (DECL_BIT_FIELD (field))
5271 mode = VOIDmode;
5272
5273 offset = DECL_FIELD_OFFSET (field);
5274 if (host_integerp (offset, 0)
5275 && host_integerp (bit_position (field), 0))
5276 {
5277 bitpos = int_bit_position (field);
5278 offset = 0;
5279 }
5280 else
5281 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5282
5283 if (offset)
5284 {
5285 enum machine_mode address_mode;
5286 rtx offset_rtx;
5287
5288 offset
5289 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5290 make_tree (TREE_TYPE (exp),
5291 target));
5292
5293 offset_rtx = expand_normal (offset);
5294 gcc_assert (MEM_P (to_rtx));
5295
5296 address_mode
5297 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5298 if (GET_MODE (offset_rtx) != address_mode)
5299 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5300
5301 to_rtx = offset_address (to_rtx, offset_rtx,
5302 highest_pow2_factor (offset));
5303 }
5304
5305 #ifdef WORD_REGISTER_OPERATIONS
5306 /* If this initializes a field that is smaller than a
5307 word, at the start of a word, try to widen it to a full
5308 word. This special case allows us to output C++ member
5309 function initializations in a form that the optimizers
5310 can understand. */
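/* For instance (hypothetical illustration): storing the constant 5 into
   a sub-word integer member that starts a word held in a register
   becomes a full-word store of 5, shifted into the high bits on
   big-endian targets, so later passes see a plain word move rather than
   a bit-field insertion. */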
5311 if (REG_P (target)
5312 && bitsize < BITS_PER_WORD
5313 && bitpos % BITS_PER_WORD == 0
5314 && GET_MODE_CLASS (mode) == MODE_INT
5315 && TREE_CODE (value) == INTEGER_CST
5316 && exp_size >= 0
5317 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5318 {
5319 tree type = TREE_TYPE (value);
5320
5321 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5322 {
5323 type = lang_hooks.types.type_for_size
5324 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5325 value = fold_convert (type, value);
5326 }
5327
5328 if (BYTES_BIG_ENDIAN)
5329 value
5330 = fold_build2 (LSHIFT_EXPR, type, value,
5331 build_int_cst (type,
5332 BITS_PER_WORD - bitsize));
5333 bitsize = BITS_PER_WORD;
5334 mode = word_mode;
5335 }
5336 #endif
5337
5338 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5339 && DECL_NONADDRESSABLE_P (field))
5340 {
5341 to_rtx = copy_rtx (to_rtx);
5342 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5343 }
5344
5345 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5346 value, type, cleared,
5347 get_alias_set (TREE_TYPE (field)));
5348 }
5349 break;
5350 }
5351 case ARRAY_TYPE:
5352 {
5353 tree value, index;
5354 unsigned HOST_WIDE_INT i;
5355 int need_to_clear;
5356 tree domain;
5357 tree elttype = TREE_TYPE (type);
5358 int const_bounds_p;
5359 HOST_WIDE_INT minelt = 0;
5360 HOST_WIDE_INT maxelt = 0;
5361
5362 domain = TYPE_DOMAIN (type);
5363 const_bounds_p = (TYPE_MIN_VALUE (domain)
5364 && TYPE_MAX_VALUE (domain)
5365 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5366 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5367
5368 /* If we have constant bounds for the range of the type, get them. */
5369 if (const_bounds_p)
5370 {
5371 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5372 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5373 }
5374
5375 /* If the constructor has fewer elements than the array, clear
5376 the whole array first. Similarly if this is a static
5377 constructor of a non-BLKmode object. */
5378 if (cleared)
5379 need_to_clear = 0;
5380 else if (REG_P (target) && TREE_STATIC (exp))
5381 need_to_clear = 1;
5382 else
5383 {
5384 unsigned HOST_WIDE_INT idx;
5385 tree index, value;
5386 HOST_WIDE_INT count = 0, zero_count = 0;
5387 need_to_clear = ! const_bounds_p;
5388
5389 /* This loop is a more accurate version of the loop in
5390 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5391 is also needed to check for missing elements. */
5392 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5393 {
5394 HOST_WIDE_INT this_node_count;
5395
5396 if (need_to_clear)
5397 break;
5398
5399 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5400 {
5401 tree lo_index = TREE_OPERAND (index, 0);
5402 tree hi_index = TREE_OPERAND (index, 1);
5403
5404 if (! host_integerp (lo_index, 1)
5405 || ! host_integerp (hi_index, 1))
5406 {
5407 need_to_clear = 1;
5408 break;
5409 }
5410
5411 this_node_count = (tree_low_cst (hi_index, 1)
5412 - tree_low_cst (lo_index, 1) + 1);
5413 }
5414 else
5415 this_node_count = 1;
5416
5417 count += this_node_count;
5418 if (mostly_zeros_p (value))
5419 zero_count += this_node_count;
5420 }
5421
5422 /* Clear the entire array first if there are any missing
5423 elements, or if the incidence of zero elements is >=
5424 75%. */
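/* The comparison below avoids a division: for positive COUNT,
   4 * zero_count >= 3 * count is equivalent to
   zero_count / count >= 0.75, the 75% threshold described above. */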
5425 if (! need_to_clear
5426 && (count < maxelt - minelt + 1
5427 || 4 * zero_count >= 3 * count))
5428 need_to_clear = 1;
5429 }
5430
5431 if (need_to_clear && size > 0)
5432 {
5433 if (REG_P (target))
5434 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5435 else
5436 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5437 cleared = 1;
5438 }
5439
5440 if (!cleared && REG_P (target))
5441 /* Inform later passes that the old value is dead. */
5442 emit_clobber (target);
5443
5444 /* Store each element of the constructor into the
5445 corresponding element of TARGET, determined by counting the
5446 elements. */
5447 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5448 {
5449 enum machine_mode mode;
5450 HOST_WIDE_INT bitsize;
5451 HOST_WIDE_INT bitpos;
5452 rtx xtarget = target;
5453
5454 if (cleared && initializer_zerop (value))
5455 continue;
5456
5457 mode = TYPE_MODE (elttype);
5458 if (mode == BLKmode)
5459 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5460 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5461 : -1);
5462 else
5463 bitsize = GET_MODE_BITSIZE (mode);
5464
5465 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5466 {
5467 tree lo_index = TREE_OPERAND (index, 0);
5468 tree hi_index = TREE_OPERAND (index, 1);
5469 rtx index_r, pos_rtx;
5470 HOST_WIDE_INT lo, hi, count;
5471 tree position;
5472
5473 /* If the range is constant and "small", unroll the loop. */
5474 if (const_bounds_p
5475 && host_integerp (lo_index, 0)
5476 && host_integerp (hi_index, 0)
5477 && (lo = tree_low_cst (lo_index, 0),
5478 hi = tree_low_cst (hi_index, 0),
5479 count = hi - lo + 1,
5480 (!MEM_P (target)
5481 || count <= 2
5482 || (host_integerp (TYPE_SIZE (elttype), 1)
5483 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5484 <= 40 * 8)))))
5485 {
5486 lo -= minelt; hi -= minelt;
5487 for (; lo <= hi; lo++)
5488 {
5489 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5490
5491 if (MEM_P (target)
5492 && !MEM_KEEP_ALIAS_SET_P (target)
5493 && TREE_CODE (type) == ARRAY_TYPE
5494 && TYPE_NONALIASED_COMPONENT (type))
5495 {
5496 target = copy_rtx (target);
5497 MEM_KEEP_ALIAS_SET_P (target) = 1;
5498 }
5499
5500 store_constructor_field
5501 (target, bitsize, bitpos, mode, value, type, cleared,
5502 get_alias_set (elttype));
5503 }
5504 }
5505 else
5506 {
5507 rtx loop_start = gen_label_rtx ();
5508 rtx loop_end = gen_label_rtx ();
5509 tree exit_cond;
5510
5511 expand_normal (hi_index);
5512
5513 index = build_decl (EXPR_LOCATION (exp),
5514 VAR_DECL, NULL_TREE, domain);
5515 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5516 SET_DECL_RTL (index, index_r);
5517 store_expr (lo_index, index_r, 0, false);
5518
5519 /* Build the head of the loop. */
5520 do_pending_stack_adjust ();
5521 emit_label (loop_start);
5522
5523 /* Assign value to element index. */
5524 position =
5525 fold_convert (ssizetype,
5526 fold_build2 (MINUS_EXPR,
5527 TREE_TYPE (index),
5528 index,
5529 TYPE_MIN_VALUE (domain)));
5530
5531 position =
5532 size_binop (MULT_EXPR, position,
5533 fold_convert (ssizetype,
5534 TYPE_SIZE_UNIT (elttype)));
5535
5536 pos_rtx = expand_normal (position);
5537 xtarget = offset_address (target, pos_rtx,
5538 highest_pow2_factor (position));
5539 xtarget = adjust_address (xtarget, mode, 0);
5540 if (TREE_CODE (value) == CONSTRUCTOR)
5541 store_constructor (value, xtarget, cleared,
5542 bitsize / BITS_PER_UNIT);
5543 else
5544 store_expr (value, xtarget, 0, false);
5545
5546 /* Generate a conditional jump to exit the loop. */
5547 exit_cond = build2 (LT_EXPR, integer_type_node,
5548 index, hi_index);
5549 jumpif (exit_cond, loop_end);
5550
5551 /* Update the loop counter, and jump to the head of
5552 the loop. */
5553 expand_assignment (index,
5554 build2 (PLUS_EXPR, TREE_TYPE (index),
5555 index, integer_one_node),
5556 false);
5557
5558 emit_jump (loop_start);
5559
5560 /* Build the end of the loop. */
5561 emit_label (loop_end);
5562 }
5563 }
5564 else if ((index != 0 && ! host_integerp (index, 0))
5565 || ! host_integerp (TYPE_SIZE (elttype), 1))
5566 {
5567 tree position;
5568
5569 if (index == 0)
5570 index = ssize_int (1);
5571
5572 if (minelt)
5573 index = fold_convert (ssizetype,
5574 fold_build2 (MINUS_EXPR,
5575 TREE_TYPE (index),
5576 index,
5577 TYPE_MIN_VALUE (domain)));
5578
5579 position =
5580 size_binop (MULT_EXPR, index,
5581 fold_convert (ssizetype,
5582 TYPE_SIZE_UNIT (elttype)));
5583 xtarget = offset_address (target,
5584 expand_normal (position),
5585 highest_pow2_factor (position));
5586 xtarget = adjust_address (xtarget, mode, 0);
5587 store_expr (value, xtarget, 0, false);
5588 }
5589 else
5590 {
5591 if (index != 0)
5592 bitpos = ((tree_low_cst (index, 0) - minelt)
5593 * tree_low_cst (TYPE_SIZE (elttype), 1));
5594 else
5595 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5596
5597 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5598 && TREE_CODE (type) == ARRAY_TYPE
5599 && TYPE_NONALIASED_COMPONENT (type))
5600 {
5601 target = copy_rtx (target);
5602 MEM_KEEP_ALIAS_SET_P (target) = 1;
5603 }
5604 store_constructor_field (target, bitsize, bitpos, mode, value,
5605 type, cleared, get_alias_set (elttype));
5606 }
5607 }
5608 break;
5609 }
5610
5611 case VECTOR_TYPE:
5612 {
5613 unsigned HOST_WIDE_INT idx;
5614 constructor_elt *ce;
5615 int i;
5616 int need_to_clear;
5617 int icode = 0;
5618 tree elttype = TREE_TYPE (type);
5619 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5620 enum machine_mode eltmode = TYPE_MODE (elttype);
5621 HOST_WIDE_INT bitsize;
5622 HOST_WIDE_INT bitpos;
5623 rtvec vector = NULL;
5624 unsigned n_elts;
5625 alias_set_type alias;
5626
5627 gcc_assert (eltmode != BLKmode);
5628
5629 n_elts = TYPE_VECTOR_SUBPARTS (type);
5630 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5631 {
5632 enum machine_mode mode = GET_MODE (target);
5633
5634 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5635 if (icode != CODE_FOR_nothing)
5636 {
5637 unsigned int i;
5638
5639 vector = rtvec_alloc (n_elts);
5640 for (i = 0; i < n_elts; i++)
5641 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5642 }
5643 }
5644
5645 /* If the constructor has fewer elements than the vector,
5646 clear the whole vector first. Similarly if this is a static
5647 constructor of a non-BLKmode object. */
5648 if (cleared)
5649 need_to_clear = 0;
5650 else if (REG_P (target) && TREE_STATIC (exp))
5651 need_to_clear = 1;
5652 else
5653 {
5654 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5655 tree value;
5656
5657 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5658 {
5659 int n_elts_here = tree_low_cst
5660 (int_const_binop (TRUNC_DIV_EXPR,
5661 TYPE_SIZE (TREE_TYPE (value)),
5662 TYPE_SIZE (elttype), 0), 1);
5663
5664 count += n_elts_here;
5665 if (mostly_zeros_p (value))
5666 zero_count += n_elts_here;
5667 }
5668
5669 /* Clear the entire vector first if there are any missing elements,
5670 or if the incidence of zero elements is >= 75%. */
5671 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5672 }
5673
5674 if (need_to_clear && size > 0 && !vector)
5675 {
5676 if (REG_P (target))
5677 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5678 else
5679 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5680 cleared = 1;
5681 }
5682
5683 /* Inform later passes that the old value is dead. */
5684 if (!cleared && !vector && REG_P (target))
5685 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5686
5687 if (MEM_P (target))
5688 alias = MEM_ALIAS_SET (target);
5689 else
5690 alias = get_alias_set (elttype);
5691
5692 /* Store each element of the constructor into the corresponding
5693 element of TARGET, determined by counting the elements. */
5694 for (idx = 0, i = 0;
5695 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5696 idx++, i += bitsize / elt_size)
5697 {
5698 HOST_WIDE_INT eltpos;
5699 tree value = ce->value;
5700
5701 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5702 if (cleared && initializer_zerop (value))
5703 continue;
5704
5705 if (ce->index)
5706 eltpos = tree_low_cst (ce->index, 1);
5707 else
5708 eltpos = i;
5709
5710 if (vector)
5711 {
5712 /* Vector CONSTRUCTORs should only be built from smaller
5713 vectors in the case of BLKmode vectors. */
5714 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5715 RTVEC_ELT (vector, eltpos)
5716 = expand_normal (value);
5717 }
5718 else
5719 {
5720 enum machine_mode value_mode =
5721 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5722 ? TYPE_MODE (TREE_TYPE (value))
5723 : eltmode;
5724 bitpos = eltpos * elt_size;
5725 store_constructor_field (target, bitsize, bitpos,
5726 value_mode, value, type,
5727 cleared, alias);
5728 }
5729 }
5730
5731 if (vector)
5732 emit_insn (GEN_FCN (icode)
5733 (target,
5734 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5735 break;
5736 }
5737
5738 default:
5739 gcc_unreachable ();
5740 }
5741 }
5742
5743 /* Store the value of EXP (an expression tree)
5744 into a subfield of TARGET which has mode MODE and occupies
5745 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5746 If MODE is VOIDmode, it means that we are storing into a bit-field.
5747
5748 Always return const0_rtx unless we have something particular to
5749 return.
5750
5751 TYPE is the type of the underlying object,
5752
5753 ALIAS_SET is the alias set for the destination. This value will
5754 (in general) be different from that for TARGET, since TARGET is a
5755 reference to the containing structure.
5756
5757 If NONTEMPORAL is true, try generating a nontemporal store. */
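/* Illustrative sketch, not part of the original sources: for a C
   assignment such as

	struct { int x : 5; } *p;  p->x = v;

   this function is typically reached with BITSIZE == 5, BITPOS == 0 and
   MODE == VOIDmode, and the VOIDmode test below routes the store through
   store_bit_field.  The exact values depend on the target and on the
   front end. */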
5758
5759 static rtx
5760 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5761 enum machine_mode mode, tree exp, tree type,
5762 alias_set_type alias_set, bool nontemporal)
5763 {
5764 if (TREE_CODE (exp) == ERROR_MARK)
5765 return const0_rtx;
5766
5767 /* If we have nothing to store, do nothing unless the expression has
5768 side-effects. */
5769 if (bitsize == 0)
5770 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5771
5772 /* If we are storing into an unaligned field of an aligned union that is
5773 in a register, we may have the mode of TARGET being an integer mode but
5774 MODE == BLKmode. In that case, get an aligned object whose size and
5775 alignment are the same as TARGET and store TARGET into it (we can avoid
5776 the store if the field being stored is the entire width of TARGET). Then
5777 call ourselves recursively to store the field into a BLKmode version of
5778 that object. Finally, load from the object into TARGET. This is not
5779 very efficient in general, but should only be slightly more expensive
5780 than the otherwise-required unaligned accesses. Perhaps this can be
5781 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5782 twice, once with emit_move_insn and once via store_field. */
5783
5784 if (mode == BLKmode
5785 && (REG_P (target) || GET_CODE (target) == SUBREG))
5786 {
5787 rtx object = assign_temp (type, 0, 1, 1);
5788 rtx blk_object = adjust_address (object, BLKmode, 0);
5789
5790 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5791 emit_move_insn (object, target);
5792
5793 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5794 nontemporal);
5795
5796 emit_move_insn (target, object);
5797
5798 /* We want to return the BLKmode version of the data. */
5799 return blk_object;
5800 }
5801
5802 if (GET_CODE (target) == CONCAT)
5803 {
5804 /* We're storing into a struct containing a single __complex. */
5805
5806 gcc_assert (!bitpos);
5807 return store_expr (exp, target, 0, nontemporal);
5808 }
5809
5810 /* If the structure is in a register or if the component
5811 is a bit field, we cannot use addressing to access it.
5812 Use bit-field techniques or SUBREG to store in it. */
5813
5814 if (mode == VOIDmode
5815 || (mode != BLKmode && ! direct_store[(int) mode]
5816 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5817 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5818 || REG_P (target)
5819 || GET_CODE (target) == SUBREG
5820 /* If the field isn't aligned enough to store as an ordinary memref,
5821 store it as a bit field. */
5822 || (mode != BLKmode
5823 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5824 || bitpos % GET_MODE_ALIGNMENT (mode))
5825 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5826 || (bitpos % BITS_PER_UNIT != 0)))
5827 /* If the RHS and field are a constant size and the size of the
5828 RHS isn't the same size as the bitfield, we must use bitfield
5829 operations. */
5830 || (bitsize >= 0
5831 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5832 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5833 {
5834 rtx temp;
5835 gimple nop_def;
5836
5837 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5838 implies a mask operation. If the precision is the same size as
5839 the field we're storing into, that mask is redundant. This is
5840 particularly common with bit field assignments generated by the
5841 C front end. */
5842 nop_def = get_def_for_expr (exp, NOP_EXPR);
5843 if (nop_def)
5844 {
5845 tree type = TREE_TYPE (exp);
5846 if (INTEGRAL_TYPE_P (type)
5847 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5848 && bitsize == TYPE_PRECISION (type))
5849 {
5850 tree op = gimple_assign_rhs1 (nop_def);
5851 type = TREE_TYPE (op);
5852 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5853 exp = op;
5854 }
5855 }
5856
5857 temp = expand_normal (exp);
5858
5859 /* If BITSIZE is narrower than the size of the type of EXP
5860 we will be narrowing TEMP. Normally, what's wanted are the
5861 low-order bits. However, if EXP's type is a record and this is a
5862 big-endian machine, we want the upper BITSIZE bits. */
5863 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5864 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5865 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5866 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5867 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5868 - bitsize),
5869 NULL_RTX, 1);
5870
5871 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5872 MODE. */
5873 if (mode != VOIDmode && mode != BLKmode
5874 && mode != TYPE_MODE (TREE_TYPE (exp)))
5875 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5876
5877 /* If the modes of TEMP and TARGET are both BLKmode, both
5878 must be in memory and BITPOS must be aligned on a byte
5879 boundary. If so, we simply do a block copy. Likewise
5880 for a BLKmode-like TARGET. */
5881 if (GET_MODE (temp) == BLKmode
5882 && (GET_MODE (target) == BLKmode
5883 || (MEM_P (target)
5884 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5885 && (bitpos % BITS_PER_UNIT) == 0
5886 && (bitsize % BITS_PER_UNIT) == 0)))
5887 {
5888 gcc_assert (MEM_P (target) && MEM_P (temp)
5889 && (bitpos % BITS_PER_UNIT) == 0);
5890
5891 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5892 emit_block_move (target, temp,
5893 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5894 / BITS_PER_UNIT),
5895 BLOCK_OP_NORMAL);
5896
5897 return const0_rtx;
5898 }
5899
5900 /* Store the value in the bitfield. */
5901 store_bit_field (target, bitsize, bitpos, mode, temp);
5902
5903 return const0_rtx;
5904 }
5905 else
5906 {
5907 /* Now build a reference to just the desired component. */
5908 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5909
5910 if (to_rtx == target)
5911 to_rtx = copy_rtx (to_rtx);
5912
5913 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5914 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5915 set_mem_alias_set (to_rtx, alias_set);
5916
5917 return store_expr (exp, to_rtx, 0, nontemporal);
5918 }
5919 }
5920 \f
5921 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5922 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5923 codes and find the ultimate containing object, which we return.
5924
5925 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5926 bit position, and *PUNSIGNEDP to the signedness of the field.
5927 If the position of the field is variable, we store a tree
5928 giving the variable offset (in units) in *POFFSET.
5929 This offset is in addition to the bit position.
5930 If the position is not variable, we store 0 in *POFFSET.
5931
5932 If any of the extraction expressions is volatile,
5933 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5934
5935 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5936 Otherwise, it is a mode that can be used to access the field.
5937
5938 If the field describes a variable-sized object, *PMODE is set to
5939 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5940 this case, but the address of the object can be found.
5941
5942 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5943 look through nodes that serve as markers of a greater alignment than
5944 the one that can be deduced from the expression. These nodes make it
5945 possible for front-ends to prevent temporaries from being created by
5946 the middle-end on alignment considerations. For that purpose, the
5947 normal operating mode at high-level is to always pass FALSE so that
5948 the ultimate containing object is really returned; moreover, the
5949 associated predicate handled_component_p will always return TRUE
5950 on these nodes, thus indicating that they are essentially handled
5951 by get_inner_reference. TRUE should only be passed when the caller
5952 is scanning the expression in order to build another representation
5953 and specifically knows how to handle these nodes; as such, this is
5954 the normal operating mode in the RTL expanders. */
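/* A worked example (hypothetical): for s.f, where F is a bit-field of
   size 3 placed 37 bits into S, this returns the expression for S with
   *PBITSIZE == 3, *PBITPOS == 37, *POFFSET == 0 and *PMODE == VOIDmode.
   For a variable index such as a[i], the variable part i * sizeof
   (element) is returned as a tree in *POFFSET and only the constant part
   of the position ends up in *PBITPOS. */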
5955
5956 tree
5957 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5958 HOST_WIDE_INT *pbitpos, tree *poffset,
5959 enum machine_mode *pmode, int *punsignedp,
5960 int *pvolatilep, bool keep_aligning)
5961 {
5962 tree size_tree = 0;
5963 enum machine_mode mode = VOIDmode;
5964 bool blkmode_bitfield = false;
5965 tree offset = size_zero_node;
5966 tree bit_offset = bitsize_zero_node;
5967
5968 /* First get the mode, signedness, and size. We do this from just the
5969 outermost expression. */
5970 if (TREE_CODE (exp) == COMPONENT_REF)
5971 {
5972 tree field = TREE_OPERAND (exp, 1);
5973 size_tree = DECL_SIZE (field);
5974 if (!DECL_BIT_FIELD (field))
5975 mode = DECL_MODE (field);
5976 else if (DECL_MODE (field) == BLKmode)
5977 blkmode_bitfield = true;
5978
5979 *punsignedp = DECL_UNSIGNED (field);
5980 }
5981 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5982 {
5983 size_tree = TREE_OPERAND (exp, 1);
5984 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5985 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5986
5987 /* For vector types, if the access has the correct size, use the mode of
5988 the inner type. */
5989 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5990 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5991 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5992 mode = TYPE_MODE (TREE_TYPE (exp));
5993 }
5994 else
5995 {
5996 mode = TYPE_MODE (TREE_TYPE (exp));
5997 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5998
5999 if (mode == BLKmode)
6000 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6001 else
6002 *pbitsize = GET_MODE_BITSIZE (mode);
6003 }
6004
6005 if (size_tree != 0)
6006 {
6007 if (! host_integerp (size_tree, 1))
6008 mode = BLKmode, *pbitsize = -1;
6009 else
6010 *pbitsize = tree_low_cst (size_tree, 1);
6011 }
6012
6013 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6014 and find the ultimate containing object. */
6015 while (1)
6016 {
6017 switch (TREE_CODE (exp))
6018 {
6019 case BIT_FIELD_REF:
6020 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6021 TREE_OPERAND (exp, 2));
6022 break;
6023
6024 case COMPONENT_REF:
6025 {
6026 tree field = TREE_OPERAND (exp, 1);
6027 tree this_offset = component_ref_field_offset (exp);
6028
6029 /* If this field hasn't been filled in yet, don't go past it.
6030 This should only happen when folding expressions made during
6031 type construction. */
6032 if (this_offset == 0)
6033 break;
6034
6035 offset = size_binop (PLUS_EXPR, offset, this_offset);
6036 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6037 DECL_FIELD_BIT_OFFSET (field));
6038
6039 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6040 }
6041 break;
6042
6043 case ARRAY_REF:
6044 case ARRAY_RANGE_REF:
6045 {
6046 tree index = TREE_OPERAND (exp, 1);
6047 tree low_bound = array_ref_low_bound (exp);
6048 tree unit_size = array_ref_element_size (exp);
6049
6050 /* We assume all arrays have sizes that are a multiple of a byte.
6051 First subtract the lower bound, if any, in the type of the
6052 index, then convert to sizetype and multiply by the size of
6053 the array element. */
6054 if (! integer_zerop (low_bound))
6055 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6056 index, low_bound);
6057
6058 offset = size_binop (PLUS_EXPR, offset,
6059 size_binop (MULT_EXPR,
6060 fold_convert (sizetype, index),
6061 unit_size));
6062 }
6063 break;
6064
6065 case REALPART_EXPR:
6066 break;
6067
6068 case IMAGPART_EXPR:
6069 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6070 bitsize_int (*pbitsize));
6071 break;
6072
6073 case VIEW_CONVERT_EXPR:
6074 if (keep_aligning && STRICT_ALIGNMENT
6075 && (TYPE_ALIGN (TREE_TYPE (exp))
6076 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6077 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6078 < BIGGEST_ALIGNMENT)
6079 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6080 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6081 goto done;
6082 break;
6083
6084 default:
6085 goto done;
6086 }
6087
6088 /* If any reference in the chain is volatile, the effect is volatile. */
6089 if (TREE_THIS_VOLATILE (exp))
6090 *pvolatilep = 1;
6091
6092 exp = TREE_OPERAND (exp, 0);
6093 }
6094 done:
6095
6096 /* If OFFSET is constant, see if we can return the whole thing as a
6097 constant bit position. Make sure to handle overflow during
6098 this conversion. */
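/* That is, compute *PBITPOS = OFFSET * BITS_PER_UNIT + BIT_OFFSET in
   double_int arithmetic and commit the result only if it fits in a
   signed HOST_WIDE_INT; otherwise OFFSET is kept as a tree and split
   out below. */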
6099 if (host_integerp (offset, 0))
6100 {
6101 double_int tem = double_int_mul (tree_to_double_int (offset),
6102 uhwi_to_double_int (BITS_PER_UNIT));
6103 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6104 if (double_int_fits_in_shwi_p (tem))
6105 {
6106 *pbitpos = double_int_to_shwi (tem);
6107 *poffset = offset = NULL_TREE;
6108 }
6109 }
6110
6111 /* Otherwise, split it up. */
6112 if (offset)
6113 {
6114 *pbitpos = tree_low_cst (bit_offset, 0);
6115 *poffset = offset;
6116 }
6117
6118 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6119 if (mode == VOIDmode
6120 && blkmode_bitfield
6121 && (*pbitpos % BITS_PER_UNIT) == 0
6122 && (*pbitsize % BITS_PER_UNIT) == 0)
6123 *pmode = BLKmode;
6124 else
6125 *pmode = mode;
6126
6127 return exp;
6128 }
6129
6130 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6131 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6132 EXP is marked as PACKED. */
6133
6134 bool
6135 contains_packed_reference (const_tree exp)
6136 {
6137 bool packed_p = false;
6138
6139 while (1)
6140 {
6141 switch (TREE_CODE (exp))
6142 {
6143 case COMPONENT_REF:
6144 {
6145 tree field = TREE_OPERAND (exp, 1);
6146 packed_p = DECL_PACKED (field)
6147 || TYPE_PACKED (TREE_TYPE (field))
6148 || TYPE_PACKED (TREE_TYPE (exp));
6149 if (packed_p)
6150 goto done;
6151 }
6152 break;
6153
6154 case BIT_FIELD_REF:
6155 case ARRAY_REF:
6156 case ARRAY_RANGE_REF:
6157 case REALPART_EXPR:
6158 case IMAGPART_EXPR:
6159 case VIEW_CONVERT_EXPR:
6160 break;
6161
6162 default:
6163 goto done;
6164 }
6165 exp = TREE_OPERAND (exp, 0);
6166 }
6167 done:
6168 return packed_p;
6169 }
6170
6171 /* Return a tree of sizetype representing the size, in bytes, of the element
6172 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6173
6174 tree
6175 array_ref_element_size (tree exp)
6176 {
6177 tree aligned_size = TREE_OPERAND (exp, 3);
6178 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6179 location_t loc = EXPR_LOCATION (exp);
6180
6181 /* If a size was specified in the ARRAY_REF, it's the size measured
6182 in alignment units of the element type. So multiply by that value. */
6183 if (aligned_size)
6184 {
6185 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6186 sizetype from another type of the same width and signedness. */
6187 if (TREE_TYPE (aligned_size) != sizetype)
6188 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6189 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6190 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6191 }
6192
6193 /* Otherwise, take the size from that of the element type. Substitute
6194 any PLACEHOLDER_EXPR that we have. */
6195 else
6196 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6197 }
6198
6199 /* Return a tree representing the lower bound of the array mentioned in
6200 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6201
6202 tree
6203 array_ref_low_bound (tree exp)
6204 {
6205 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6206
6207 /* If a lower bound is specified in EXP, use it. */
6208 if (TREE_OPERAND (exp, 2))
6209 return TREE_OPERAND (exp, 2);
6210
6211 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6212 substituting for a PLACEHOLDER_EXPR as needed. */
6213 if (domain_type && TYPE_MIN_VALUE (domain_type))
6214 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6215
6216 /* Otherwise, return a zero of the appropriate type. */
6217 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6218 }
6219
6220 /* Return a tree representing the upper bound of the array mentioned in
6221 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6222
6223 tree
6224 array_ref_up_bound (tree exp)
6225 {
6226 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6227
6228 /* If there is a domain type and it has an upper bound, use it, substituting
6229 for a PLACEHOLDER_EXPR as needed. */
6230 if (domain_type && TYPE_MAX_VALUE (domain_type))
6231 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6232
6233 /* Otherwise fail. */
6234 return NULL_TREE;
6235 }
6236
6237 /* Return a tree representing the offset, in bytes, of the field referenced
6238 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6239
6240 tree
6241 component_ref_field_offset (tree exp)
6242 {
6243 tree aligned_offset = TREE_OPERAND (exp, 2);
6244 tree field = TREE_OPERAND (exp, 1);
6245 location_t loc = EXPR_LOCATION (exp);
6246
6247 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6248 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6249 value. */
6250 if (aligned_offset)
6251 {
6252 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6253 sizetype from another type of the same width and signedness. */
6254 if (TREE_TYPE (aligned_offset) != sizetype)
6255 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6256 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6257 size_int (DECL_OFFSET_ALIGN (field)
6258 / BITS_PER_UNIT));
6259 }
6260
6261 /* Otherwise, take the offset from that of the field. Substitute
6262 any PLACEHOLDER_EXPR that we have. */
6263 else
6264 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6265 }
6266
6267 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6268
6269 static unsigned HOST_WIDE_INT
6270 target_align (const_tree target)
6271 {
6272 /* We might have a chain of nested references with intermediate misaligning
6273 bitfield components, so we need to recurse to find out. */
6274
6275 unsigned HOST_WIDE_INT this_align, outer_align;
6276
6277 switch (TREE_CODE (target))
6278 {
6279 case BIT_FIELD_REF:
6280 return 1;
6281
6282 case COMPONENT_REF:
6283 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6284 outer_align = target_align (TREE_OPERAND (target, 0));
6285 return MIN (this_align, outer_align);
6286
6287 case ARRAY_REF:
6288 case ARRAY_RANGE_REF:
6289 this_align = TYPE_ALIGN (TREE_TYPE (target));
6290 outer_align = target_align (TREE_OPERAND (target, 0));
6291 return MIN (this_align, outer_align);
6292
6293 CASE_CONVERT:
6294 case NON_LVALUE_EXPR:
6295 case VIEW_CONVERT_EXPR:
6296 this_align = TYPE_ALIGN (TREE_TYPE (target));
6297 outer_align = target_align (TREE_OPERAND (target, 0));
6298 return MAX (this_align, outer_align);
6299
6300 default:
6301 return TYPE_ALIGN (TREE_TYPE (target));
6302 }
6303 }
6304
6305 \f
6306 /* Given an rtx VALUE that may contain additions and multiplications, return
6307 an equivalent value that just refers to a register, memory, or constant.
6308 This is done by generating instructions to perform the arithmetic and
6309 returning a pseudo-register containing the value.
6310
6311 The returned value may be a REG, SUBREG, MEM or constant. */
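/* As an illustration (hypothetical input): given (plus (reg A)
   (const_int 4)) and a null TARGET, this emits an add into a fresh
   pseudo and returns that pseudo, whereas a plain REG, MEM or constant
   is returned unchanged. */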
6312
6313 rtx
6314 force_operand (rtx value, rtx target)
6315 {
6316 rtx op1, op2;
6317 /* Use subtarget as the target for operand 0 of a binary operation. */
6318 rtx subtarget = get_subtarget (target);
6319 enum rtx_code code = GET_CODE (value);
6320
6321 /* Check for subreg applied to an expression produced by loop optimizer. */
6322 if (code == SUBREG
6323 && !REG_P (SUBREG_REG (value))
6324 && !MEM_P (SUBREG_REG (value)))
6325 {
6326 value
6327 = simplify_gen_subreg (GET_MODE (value),
6328 force_reg (GET_MODE (SUBREG_REG (value)),
6329 force_operand (SUBREG_REG (value),
6330 NULL_RTX)),
6331 GET_MODE (SUBREG_REG (value)),
6332 SUBREG_BYTE (value));
6333 code = GET_CODE (value);
6334 }
6335
6336 /* Check for a PIC address load. */
6337 if ((code == PLUS || code == MINUS)
6338 && XEXP (value, 0) == pic_offset_table_rtx
6339 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6340 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6341 || GET_CODE (XEXP (value, 1)) == CONST))
6342 {
6343 if (!subtarget)
6344 subtarget = gen_reg_rtx (GET_MODE (value));
6345 emit_move_insn (subtarget, value);
6346 return subtarget;
6347 }
6348
6349 if (ARITHMETIC_P (value))
6350 {
6351 op2 = XEXP (value, 1);
6352 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6353 subtarget = 0;
6354 if (code == MINUS && CONST_INT_P (op2))
6355 {
6356 code = PLUS;
6357 op2 = negate_rtx (GET_MODE (value), op2);
6358 }
6359
6360 /* Check for an addition with OP2 a constant integer and our first
6361 operand a PLUS of a virtual register and something else. In that
6362 case, we want to emit the sum of the virtual register and the
6363 constant first and then add the other value. This allows virtual
6364 register instantiation to simply modify the constant rather than
6365 creating another one around this addition. */
6366 if (code == PLUS && CONST_INT_P (op2)
6367 && GET_CODE (XEXP (value, 0)) == PLUS
6368 && REG_P (XEXP (XEXP (value, 0), 0))
6369 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6370 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6371 {
6372 rtx temp = expand_simple_binop (GET_MODE (value), code,
6373 XEXP (XEXP (value, 0), 0), op2,
6374 subtarget, 0, OPTAB_LIB_WIDEN);
6375 return expand_simple_binop (GET_MODE (value), code, temp,
6376 force_operand (XEXP (XEXP (value,
6377 0), 1), 0),
6378 target, 0, OPTAB_LIB_WIDEN);
6379 }
6380
6381 op1 = force_operand (XEXP (value, 0), subtarget);
6382 op2 = force_operand (op2, NULL_RTX);
6383 switch (code)
6384 {
6385 case MULT:
6386 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6387 case DIV:
6388 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6389 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6390 target, 1, OPTAB_LIB_WIDEN);
6391 else
6392 return expand_divmod (0,
6393 FLOAT_MODE_P (GET_MODE (value))
6394 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6395 GET_MODE (value), op1, op2, target, 0);
6396 case MOD:
6397 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6398 target, 0);
6399 case UDIV:
6400 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6401 target, 1);
6402 case UMOD:
6403 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6404 target, 1);
6405 case ASHIFTRT:
6406 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6407 target, 0, OPTAB_LIB_WIDEN);
6408 default:
6409 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6410 target, 1, OPTAB_LIB_WIDEN);
6411 }
6412 }
6413 if (UNARY_P (value))
6414 {
6415 if (!target)
6416 target = gen_reg_rtx (GET_MODE (value));
6417 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6418 switch (code)
6419 {
6420 case ZERO_EXTEND:
6421 case SIGN_EXTEND:
6422 case TRUNCATE:
6423 case FLOAT_EXTEND:
6424 case FLOAT_TRUNCATE:
6425 convert_move (target, op1, code == ZERO_EXTEND);
6426 return target;
6427
6428 case FIX:
6429 case UNSIGNED_FIX:
6430 expand_fix (target, op1, code == UNSIGNED_FIX);
6431 return target;
6432
6433 case FLOAT:
6434 case UNSIGNED_FLOAT:
6435 expand_float (target, op1, code == UNSIGNED_FLOAT);
6436 return target;
6437
6438 default:
6439 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6440 }
6441 }
6442
6443 #ifdef INSN_SCHEDULING
6444 /* On machines that have insn scheduling, we want all memory references to be
6445 explicit, so we need to deal with such paradoxical SUBREGs. */
6446 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6447 && (GET_MODE_SIZE (GET_MODE (value))
6448 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6449 value
6450 = simplify_gen_subreg (GET_MODE (value),
6451 force_reg (GET_MODE (SUBREG_REG (value)),
6452 force_operand (SUBREG_REG (value),
6453 NULL_RTX)),
6454 GET_MODE (SUBREG_REG (value)),
6455 SUBREG_BYTE (value));
6456 #endif
6457
6458 return value;
6459 }
6460 \f
6461 /* Subroutine of expand_expr: return nonzero iff there is no way that
6462 EXP can reference X, which is being modified. TOP_P is nonzero if this
6463 call is going to be used to determine whether we need a temporary
6464 for EXP, as opposed to a recursive call to this function.
6465
6466 It is always safe for this routine to return zero since it merely
6467 searches for optimization opportunities. */
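/* Illustrative use (hypothetical): when expanding an assignment to X,
   the expander may call safe_from_p (DECL_RTL (X), rhs, 1) to decide
   whether the right-hand side can be computed directly into X's rtx;
   a zero answer merely forces the conservative choice of a temporary. */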
6468
6469 int
6470 safe_from_p (const_rtx x, tree exp, int top_p)
6471 {
6472 rtx exp_rtl = 0;
6473 int i, nops;
6474
6475 if (x == 0
6476 /* If EXP has varying size, we MUST use a target since we currently
6477 have no way of allocating temporaries of variable size
6478 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6479 So we assume here that something at a higher level has prevented a
6480 clash. This is somewhat bogus, but the best we can do. Only
6481 do this when X is BLKmode and when we are at the top level. */
6482 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6483 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6484 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6485 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6486 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6487 != INTEGER_CST)
6488 && GET_MODE (x) == BLKmode)
6489 /* If X is in the outgoing argument area, it is always safe. */
6490 || (MEM_P (x)
6491 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6492 || (GET_CODE (XEXP (x, 0)) == PLUS
6493 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6494 return 1;
6495
6496 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6497 find the underlying pseudo. */
6498 if (GET_CODE (x) == SUBREG)
6499 {
6500 x = SUBREG_REG (x);
6501 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6502 return 0;
6503 }
6504
6505 /* Now look at our tree code and possibly recurse. */
6506 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6507 {
6508 case tcc_declaration:
6509 exp_rtl = DECL_RTL_IF_SET (exp);
6510 break;
6511
6512 case tcc_constant:
6513 return 1;
6514
6515 case tcc_exceptional:
6516 if (TREE_CODE (exp) == TREE_LIST)
6517 {
6518 while (1)
6519 {
6520 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6521 return 0;
6522 exp = TREE_CHAIN (exp);
6523 if (!exp)
6524 return 1;
6525 if (TREE_CODE (exp) != TREE_LIST)
6526 return safe_from_p (x, exp, 0);
6527 }
6528 }
6529 else if (TREE_CODE (exp) == CONSTRUCTOR)
6530 {
6531 constructor_elt *ce;
6532 unsigned HOST_WIDE_INT idx;
6533
6534 for (idx = 0;
6535 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6536 idx++)
6537 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6538 || !safe_from_p (x, ce->value, 0))
6539 return 0;
6540 return 1;
6541 }
6542 else if (TREE_CODE (exp) == ERROR_MARK)
6543 return 1; /* An already-visited SAVE_EXPR? */
6544 else
6545 return 0;
6546
6547 case tcc_statement:
6548 /* The only case we look at here is the DECL_INITIAL inside a
6549 DECL_EXPR. */
6550 return (TREE_CODE (exp) != DECL_EXPR
6551 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6552 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6553 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6554
6555 case tcc_binary:
6556 case tcc_comparison:
6557 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6558 return 0;
6559 /* Fall through. */
6560
6561 case tcc_unary:
6562 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6563
6564 case tcc_expression:
6565 case tcc_reference:
6566 case tcc_vl_exp:
6567 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6568 the expression. If it is set, we conflict iff we are that rtx or
6569 both are in memory. Otherwise, we check all operands of the
6570 expression recursively. */
6571
6572 switch (TREE_CODE (exp))
6573 {
6574 case ADDR_EXPR:
6575 /* If the operand is static or we are static, we can't conflict.
6576 Likewise if we don't conflict with the operand at all. */
6577 if (staticp (TREE_OPERAND (exp, 0))
6578 || TREE_STATIC (exp)
6579 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6580 return 1;
6581
6582 /* Otherwise, the only way this can conflict is if we are taking
6583 the address of a DECL whose address is part of X, which is
6584 very rare. */
6585 exp = TREE_OPERAND (exp, 0);
6586 if (DECL_P (exp))
6587 {
6588 if (!DECL_RTL_SET_P (exp)
6589 || !MEM_P (DECL_RTL (exp)))
6590 return 0;
6591 else
6592 exp_rtl = XEXP (DECL_RTL (exp), 0);
6593 }
6594 break;
6595
6596 case MISALIGNED_INDIRECT_REF:
6597 case ALIGN_INDIRECT_REF:
6598 case INDIRECT_REF:
6599 if (MEM_P (x)
6600 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6601 get_alias_set (exp)))
6602 return 0;
6603 break;
6604
6605 case CALL_EXPR:
6606 /* Assume that the call will clobber all hard registers and
6607 all of memory. */
6608 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6609 || MEM_P (x))
6610 return 0;
6611 break;
6612
6613 case WITH_CLEANUP_EXPR:
6614 case CLEANUP_POINT_EXPR:
6615 /* Lowered by gimplify.c. */
6616 gcc_unreachable ();
6617
6618 case SAVE_EXPR:
6619 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6620
6621 default:
6622 break;
6623 }
6624
6625 /* If we have an rtx, we do not need to scan our operands. */
6626 if (exp_rtl)
6627 break;
6628
6629 nops = TREE_OPERAND_LENGTH (exp);
6630 for (i = 0; i < nops; i++)
6631 if (TREE_OPERAND (exp, i) != 0
6632 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6633 return 0;
6634
6635 break;
6636
6637 case tcc_type:
6638 /* Should never get a type here. */
6639 gcc_unreachable ();
6640 }
6641
6642 /* If we have an rtl, find any enclosed object. Then see if we conflict
6643 with it. */
6644 if (exp_rtl)
6645 {
6646 if (GET_CODE (exp_rtl) == SUBREG)
6647 {
6648 exp_rtl = SUBREG_REG (exp_rtl);
6649 if (REG_P (exp_rtl)
6650 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6651 return 0;
6652 }
6653
6654 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6655 are memory and they conflict. */
6656 return ! (rtx_equal_p (x, exp_rtl)
6657 || (MEM_P (x) && MEM_P (exp_rtl)
6658 && true_dependence (exp_rtl, VOIDmode, x,
6659 rtx_addr_varies_p)));
6660 }
6661
6662 /* If we reach here, it is safe. */
6663 return 1;
6664 }
6665
6666 \f
6667 /* Return the highest power of two that EXP is known to be a multiple of.
6668 This is used in updating alignment of MEMs in array references. */
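/* For example (illustrative): an INTEGER_CST of 24 yields 8, its lowest
   set bit; i * 24 yields 8 * highest_pow2_factor (i); and a PLUS_EXPR
   yields the minimum of its operands' factors, as computed case by case
   below. */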
6669
6670 unsigned HOST_WIDE_INT
6671 highest_pow2_factor (const_tree exp)
6672 {
6673 unsigned HOST_WIDE_INT c0, c1;
6674
6675 switch (TREE_CODE (exp))
6676 {
6677 case INTEGER_CST:
6678 /* We can find the lowest bit that's a one. If the low
6679 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6680 We need to handle this case since we can find it in a COND_EXPR,
6681 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6682 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6683 later ICE. */
6684 if (TREE_OVERFLOW (exp))
6685 return BIGGEST_ALIGNMENT;
6686 else
6687 {
6688 /* Note: tree_low_cst is intentionally not used here,
6689 we don't care about the upper bits. */
6690 c0 = TREE_INT_CST_LOW (exp);
6691 c0 &= -c0;
6692 return c0 ? c0 : BIGGEST_ALIGNMENT;
6693 }
6694 break;
6695
6696 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6697 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6698 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6699 return MIN (c0, c1);
6700
6701 case MULT_EXPR:
6702 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6703 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6704 return c0 * c1;
6705
6706 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6707 case CEIL_DIV_EXPR:
6708 if (integer_pow2p (TREE_OPERAND (exp, 1))
6709 && host_integerp (TREE_OPERAND (exp, 1), 1))
6710 {
6711 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6712 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6713 return MAX (1, c0 / c1);
6714 }
6715 break;
6716
6717 case BIT_AND_EXPR:
6718 /* The highest power of two of a bit-and expression is the maximum of
6719 that of its operands. We typically get here for a complex LHS and
6720 a constant negative power of two on the RHS to force an explicit
6721 alignment, so don't bother looking at the LHS. */
6722 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6723
6724 CASE_CONVERT:
6725 case SAVE_EXPR:
6726 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6727
6728 case COMPOUND_EXPR:
6729 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6730
6731 case COND_EXPR:
6732 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6733 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6734 return MIN (c0, c1);
6735
6736 default:
6737 break;
6738 }
6739
6740 return 1;
6741 }
6742
6743 /* Similar, except that the alignment requirements of TARGET are
6744 taken into account. Assume it is at least as aligned as its
6745 type, unless it is a COMPONENT_REF in which case the layout of
6746 the structure gives the alignment. */
6747
6748 static unsigned HOST_WIDE_INT
6749 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6750 {
6751 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6752 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6753
6754 return MAX (factor, talign);
6755 }
6756 \f
6757 /* Return &VAR expression for emulated thread local VAR. */
6758
6759 static tree
6760 emutls_var_address (tree var)
6761 {
6762 tree emuvar = emutls_decl (var);
6763 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6764 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6765 tree arglist = build_tree_list (NULL_TREE, arg);
6766 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6767 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6768 }
6769 \f
6770
6771 /* Subroutine of expand_expr. Expand the two operands of a binary
6772 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6773 The value may be stored in TARGET if TARGET is nonzero. The
6774 MODIFIER argument is as documented by expand_expr. */
6775
6776 static void
6777 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6778 enum expand_modifier modifier)
6779 {
6780 if (! safe_from_p (target, exp1, 1))
6781 target = 0;
6782 if (operand_equal_p (exp0, exp1, 0))
6783 {
6784 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6785 *op1 = copy_rtx (*op0);
6786 }
6787 else
6788 {
6789 /* If we need to preserve evaluation order, copy exp0 into its own
6790 temporary variable so that it can't be clobbered by exp1. */
6791 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6792 exp0 = save_expr (exp0);
6793 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6794 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6795 }
6796 }
6797
6798 \f
6799 /* Return a MEM that contains constant EXP. DEFER is as for
6800 output_constant_def and MODIFIER is as for expand_expr. */
6801
6802 static rtx
6803 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6804 {
6805 rtx mem;
6806
6807 mem = output_constant_def (exp, defer);
6808 if (modifier != EXPAND_INITIALIZER)
6809 mem = use_anchored_address (mem);
6810 return mem;
6811 }
6812
6813 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6814 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6815
6816 static rtx
6817 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6818 enum expand_modifier modifier, addr_space_t as)
6819 {
6820 rtx result, subtarget;
6821 tree inner, offset;
6822 HOST_WIDE_INT bitsize, bitpos;
6823 int volatilep, unsignedp;
6824 enum machine_mode mode1;
6825
6826 /* If we are taking the address of a constant and are at the top level,
6827 we have to use output_constant_def since we can't call force_const_mem
6828 at top level. */
6829 /* ??? This should be considered a front-end bug. We should not be
6830 generating ADDR_EXPR of something that isn't an LVALUE. The only
6831 exception here is STRING_CST. */
6832 if (CONSTANT_CLASS_P (exp))
6833 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6834
6835 /* Everything must be something allowed by is_gimple_addressable. */
6836 switch (TREE_CODE (exp))
6837 {
6838 case INDIRECT_REF:
6839 /* This case will happen via recursion for &a->b. */
6840 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6841
6842 case CONST_DECL:
6843 /* Recurse and make the output_constant_def clause above handle this. */
6844 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6845 tmode, modifier, as);
6846
6847 case REALPART_EXPR:
6848 /* The real part of the complex number is always first, therefore
6849 the address is the same as the address of the parent object. */
6850 offset = 0;
6851 bitpos = 0;
6852 inner = TREE_OPERAND (exp, 0);
6853 break;
6854
6855 case IMAGPART_EXPR:
6856 /* The imaginary part of the complex number is always second.
6857 The expression is therefore always offset by the size of the
6858 scalar type. */
6859 offset = 0;
6860 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6861 inner = TREE_OPERAND (exp, 0);
6862 break;
6863
6864 case VAR_DECL:
6865 /* TLS emulation hook - replace __thread VAR's &VAR with
6866 __emutls_get_address (&_emutls.VAR). */
6867 if (! targetm.have_tls
6868 && TREE_CODE (exp) == VAR_DECL
6869 && DECL_THREAD_LOCAL_P (exp))
6870 {
6871 exp = emutls_var_address (exp);
6872 return expand_expr (exp, target, tmode, modifier);
6873 }
6874 /* Fall through. */
6875
6876 default:
6877 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6878 expand_expr, as that can have various side effects; LABEL_DECLs, for
6879 example, may not have their DECL_RTL set yet. Expand the rtl of
6880 CONSTRUCTORs too, which should yield a memory reference for the
6881 constructor's contents. Assume language specific tree nodes can
6882 be expanded in some interesting way. */
6883 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6884 if (DECL_P (exp)
6885 || TREE_CODE (exp) == CONSTRUCTOR
6886 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6887 {
6888 result = expand_expr (exp, target, tmode,
6889 modifier == EXPAND_INITIALIZER
6890 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6891
6892 /* If the DECL isn't in memory, then the DECL wasn't properly
6893 marked TREE_ADDRESSABLE, which will be either a front-end
6894 or a tree optimizer bug. */
6895 gcc_assert (MEM_P (result));
6896 result = XEXP (result, 0);
6897
6898 /* ??? Is this needed anymore? */
6899 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6900 {
6901 assemble_external (exp);
6902 TREE_USED (exp) = 1;
6903 }
6904
6905 if (modifier != EXPAND_INITIALIZER
6906 && modifier != EXPAND_CONST_ADDRESS)
6907 result = force_operand (result, target);
6908 return result;
6909 }
6910
6911 /* Pass FALSE as the last argument to get_inner_reference although
6912 we are expanding to RTL. The rationale is that we know how to
6913 handle "aligning nodes" here: we can just bypass them because
6914 they won't change the final object whose address will be returned
6915 (they actually exist only for that purpose). */
6916 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6917 &mode1, &unsignedp, &volatilep, false);
6918 break;
6919 }
6920
6921 /* We must have made progress. */
6922 gcc_assert (inner != exp);
6923
6924 subtarget = offset || bitpos ? NULL_RTX : target;
6925 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6926 inner alignment, force the inner to be sufficiently aligned. */
6927 if (CONSTANT_CLASS_P (inner)
6928 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6929 {
6930 inner = copy_node (inner);
6931 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6932 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6933 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6934 }
6935 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6936
6937 if (offset)
6938 {
6939 rtx tmp;
6940
6941 if (modifier != EXPAND_NORMAL)
6942 result = force_operand (result, NULL);
6943 tmp = expand_expr (offset, NULL_RTX, tmode,
6944 modifier == EXPAND_INITIALIZER
6945 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6946
6947 result = convert_memory_address_addr_space (tmode, result, as);
6948 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6949
6950 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6951 result = gen_rtx_PLUS (tmode, result, tmp);
6952 else
6953 {
6954 subtarget = bitpos ? NULL_RTX : target;
6955 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6956 1, OPTAB_LIB_WIDEN);
6957 }
6958 }
6959
6960 if (bitpos)
6961 {
6962 /* Someone beforehand should have rejected taking the address
6963 of such an object. */
6964 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6965
6966 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6967 if (modifier < EXPAND_SUM)
6968 result = force_operand (result, target);
6969 }
6970
6971 return result;
6972 }
6973
6974 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6975 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6976
6977 static rtx
6978 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6979 enum expand_modifier modifier)
6980 {
6981 addr_space_t as = ADDR_SPACE_GENERIC;
6982 enum machine_mode address_mode = Pmode;
6983 enum machine_mode pointer_mode = ptr_mode;
6984 enum machine_mode rmode;
6985 rtx result;
6986
6987 /* Target mode of VOIDmode says "whatever's natural". */
6988 if (tmode == VOIDmode)
6989 tmode = TYPE_MODE (TREE_TYPE (exp));
6990
6991 if (POINTER_TYPE_P (TREE_TYPE (exp)))
6992 {
6993 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
6994 address_mode = targetm.addr_space.address_mode (as);
6995 pointer_mode = targetm.addr_space.pointer_mode (as);
6996 }
6997
6998 /* We can get called with some Weird Things if the user does silliness
6999 like "(short) &a". In that case, convert_memory_address won't do
7000 the right thing, so ignore the given target mode. */
7001 if (tmode != address_mode && tmode != pointer_mode)
7002 tmode = address_mode;
7003
7004 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7005 tmode, modifier, as);
7006
7007 /* Despite expand_expr's claims about ignoring TMODE when not
7008 strictly convenient, stuff breaks if we don't honor it. Note
7009 that combined with the above, we only do this for pointer modes. */
7010 rmode = GET_MODE (result);
7011 if (rmode == VOIDmode)
7012 rmode = tmode;
7013 if (rmode != tmode)
7014 result = convert_memory_address_addr_space (tmode, result, as);
7015
7016 return result;
7017 }
7018
7019 /* Generate code for computing CONSTRUCTOR EXP.
7020 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7021 is TRUE, NULL is returned instead of creating a temporary
7022 variable in memory, and the caller needs to handle it differently. */
7023
7024 static rtx
7025 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7026 bool avoid_temp_mem)
7027 {
7028 tree type = TREE_TYPE (exp);
7029 enum machine_mode mode = TYPE_MODE (type);
7030
7031 /* Try to avoid creating a temporary at all. This is possible
7032 if all of the initializer is zero.
7033 FIXME: try to handle all [0..255] initializers that memset
7034 can handle. */
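/* Illustrative example: an aggregate whose initializer is entirely
   zero, e.g.

	struct S { int a[16]; } s;
	...
	s = (struct S) { 0 };

   can be expanded below as a single clear_storage of TARGET (a
   memset-style block clear) instead of storing each field.  */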
7035 if (TREE_STATIC (exp)
7036 && !TREE_ADDRESSABLE (exp)
7037 && target != 0 && mode == BLKmode
7038 && all_zeros_p (exp))
7039 {
7040 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7041 return target;
7042 }
7043
7044 /* All elts simple constants => refer to a constant in memory. But
7045 if this is a non-BLKmode mode, let it store a field at a time
7046 since that should make a CONST_INT or CONST_DOUBLE when we
7047 fold. Likewise, if we have a target we can use, it is best to
7048 store directly into the target unless the type is large enough
7049 that memcpy will be used. If we are making an initializer and
7050 all operands are constant, put it in memory as well.
7051
7052 FIXME: Avoid trying to fill vector constructors piecemeal.
7053 Output them with output_constant_def below unless we're sure
7054 they're zeros. This should go away when vector initializers
7055 are treated like VECTOR_CST instead of arrays. */
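/* For example, a constant aggregate initializer such as

	static const struct point p = { 3, 4 };

   is typically emitted as read-only data and referenced through its
   constant address, whereas a small non-BLKmode constructor is better
   built up field by field so that it folds to a CONST_INT or
   CONST_DOUBLE.  */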
7056 if ((TREE_STATIC (exp)
7057 && ((mode == BLKmode
7058 && ! (target != 0 && safe_from_p (target, exp, 1)))
7059 || TREE_ADDRESSABLE (exp)
7060 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7061 && (! MOVE_BY_PIECES_P
7062 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7063 TYPE_ALIGN (type)))
7064 && ! mostly_zeros_p (exp))))
7065 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7066 && TREE_CONSTANT (exp)))
7067 {
7068 rtx constructor;
7069
7070 if (avoid_temp_mem)
7071 return NULL_RTX;
7072
7073 constructor = expand_expr_constant (exp, 1, modifier);
7074
7075 if (modifier != EXPAND_CONST_ADDRESS
7076 && modifier != EXPAND_INITIALIZER
7077 && modifier != EXPAND_SUM)
7078 constructor = validize_mem (constructor);
7079
7080 return constructor;
7081 }
7082
7083 /* Handle calls that pass values in multiple non-contiguous
7084 locations. The Irix 6 ABI has examples of this. */
7085 if (target == 0 || ! safe_from_p (target, exp, 1)
7086 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7087 {
7088 if (avoid_temp_mem)
7089 return NULL_RTX;
7090
7091 target
7092 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7093 | (TREE_READONLY (exp)
7094 * TYPE_QUAL_CONST))),
7095 0, TREE_ADDRESSABLE (exp), 1);
7096 }
7097
7098 store_constructor (exp, target, 0, int_expr_size (exp));
7099 return target;
7100 }
7101
7102
7103 /* expand_expr: generate code for computing expression EXP.
7104 An rtx for the computed value is returned. The value is never null.
7105 In the case of a void EXP, const0_rtx is returned.
7106
7107 The value may be stored in TARGET if TARGET is nonzero.
7108 TARGET is just a suggestion; callers must assume that
7109 the rtx returned may not be the same as TARGET.
7110
7111 If TARGET is CONST0_RTX, it means that the value will be ignored.
7112
7113 If TMODE is not VOIDmode, it suggests generating the
7114 result in mode TMODE. But this is done only when convenient.
7115 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7116 TMODE is just a suggestion; callers must assume that
7117 the rtx returned may not have mode TMODE.
7118
7119 Note that TARGET may have neither TMODE nor MODE. In that case, it
7120 probably will not be used.
7121
7122 If MODIFIER is EXPAND_SUM then when EXP is an addition
7123 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7124 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7125 products as above, or REG or MEM, or constant.
7126 Ordinarily in such cases we would output mul or add instructions
7127 and then return a pseudo reg containing the sum.
7128
7129 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7130 it also marks a label as absolutely required (it can't be dead).
7131 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7132 This is used for outputting expressions used in initializers.
7133
7134 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7135 with a constant address even if that address is not normally legitimate.
7136 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7137
7138 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7139 a call parameter. Such targets require special care as we haven't yet
7140 marked TARGET so that it's safe from being trashed by libcalls. We
7141 don't want to use TARGET for anything but the final result;
7142 intermediate values must go elsewhere. Additionally, calls to
7143 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7144
7145 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7146 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7147 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7148 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7149 recursively. */
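/* For instance, under EXPAND_SUM an address computation like
   "&x + i * 4" may legitimately come back as an unsimplified sum
   such as

	(plus (mult (reg <i>) (const_int 4)) (symbol_ref "x"))

   rather than being forced into a pseudo register; see the
   description of EXPAND_SUM above.  */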
7150
7151 rtx
7152 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7153 enum expand_modifier modifier, rtx *alt_rtl)
7154 {
7155 rtx ret;
7156
7157 /* Handle ERROR_MARK before anybody tries to access its type. */
7158 if (TREE_CODE (exp) == ERROR_MARK
7159 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7160 {
7161 ret = CONST0_RTX (tmode);
7162 return ret ? ret : const0_rtx;
7163 }
7164
7165 /* If this is an expression of some kind and it has an associated line
7166 number, then emit the line number before expanding the expression.
7167
7168 We need to save and restore the file and line information so that
7169 errors discovered during expansion are emitted with the right
7170 information. It would be better if the diagnostic routines
7171 used the file/line information embedded in the tree nodes rather
7172 than globals. */
7173 if (cfun && EXPR_HAS_LOCATION (exp))
7174 {
7175 location_t saved_location = input_location;
7176 input_location = EXPR_LOCATION (exp);
7177 set_curr_insn_source_location (input_location);
7178
7179 /* Record where the insns produced belong. */
7180 set_curr_insn_block (TREE_BLOCK (exp));
7181
7182 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7183
7184 input_location = saved_location;
7185 }
7186 else
7187 {
7188 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7189 }
7190
7191 return ret;
7192 }
7193
7194 rtx
7195 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7196 enum expand_modifier modifier)
7197 {
7198 rtx op0, op1, op2, temp;
7199 tree type;
7200 int unsignedp;
7201 enum machine_mode mode;
7202 enum tree_code code = ops->code;
7203 optab this_optab;
7204 rtx subtarget, original_target;
7205 int ignore;
7206 tree subexp0, subexp1;
7207 bool reduce_bit_field;
7208 gimple subexp0_def, subexp1_def;
7209 tree top0, top1;
7210 location_t loc = ops->location;
7211 tree treeop0, treeop1;
7212 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7213 ? reduce_to_bit_field_precision ((expr), \
7214 target, \
7215 type) \
7216 : (expr))
7217
7218 type = ops->type;
7219 mode = TYPE_MODE (type);
7220 unsignedp = TYPE_UNSIGNED (type);
7221
7222 treeop0 = ops->op0;
7223 treeop1 = ops->op1;
7224
7225 /* We should be called only on simple (binary or unary) expressions,
7226 exactly those that are valid in gimple expressions that aren't
7227 GIMPLE_SINGLE_RHS (or invalid). */
7228 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7229 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7230
7231 ignore = (target == const0_rtx
7232 || ((CONVERT_EXPR_CODE_P (code)
7233 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7234 && TREE_CODE (type) == VOID_TYPE));
7235
7236 /* We should be called only if we need the result. */
7237 gcc_assert (!ignore);
7238
7239 /* An operation in what may be a bit-field type needs the
7240 result to be reduced to the precision of the bit-field type,
7241 which is narrower than that of the type's mode. */
7242 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7243 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7244
7245 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7246 target = 0;
7247
7248 /* Use subtarget as the target for operand 0 of a binary operation. */
7249 subtarget = get_subtarget (target);
7250 original_target = target;
7251
7252 switch (code)
7253 {
7254 case NON_LVALUE_EXPR:
7255 case PAREN_EXPR:
7256 CASE_CONVERT:
7257 if (treeop0 == error_mark_node)
7258 return const0_rtx;
7259
7260 if (TREE_CODE (type) == UNION_TYPE)
7261 {
7262 tree valtype = TREE_TYPE (treeop0);
7263
7264 /* If both input and output are BLKmode, this conversion isn't doing
7265 anything except possibly changing memory attributes. */
7266 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7267 {
7268 rtx result = expand_expr (treeop0, target, tmode,
7269 modifier);
7270
7271 result = copy_rtx (result);
7272 set_mem_attributes (result, type, 0);
7273 return result;
7274 }
7275
7276 if (target == 0)
7277 {
7278 if (TYPE_MODE (type) != BLKmode)
7279 target = gen_reg_rtx (TYPE_MODE (type));
7280 else
7281 target = assign_temp (type, 0, 1, 1);
7282 }
7283
7284 if (MEM_P (target))
7285 /* Store data into beginning of memory target. */
7286 store_expr (treeop0,
7287 adjust_address (target, TYPE_MODE (valtype), 0),
7288 modifier == EXPAND_STACK_PARM,
7289 false);
7290
7291 else
7292 {
7293 gcc_assert (REG_P (target));
7294
7295 /* Store this field into a union of the proper type. */
7296 store_field (target,
7297 MIN ((int_size_in_bytes (TREE_TYPE
7298 (treeop0))
7299 * BITS_PER_UNIT),
7300 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7301 0, TYPE_MODE (valtype), treeop0,
7302 type, 0, false);
7303 }
7304
7305 /* Return the entire union. */
7306 return target;
7307 }
7308
7309 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7310 {
7311 op0 = expand_expr (treeop0, target, VOIDmode,
7312 modifier);
7313
7314 /* If the signedness of the conversion differs and OP0 is
7315 a promoted SUBREG, clear that indication since we now
7316 have to do the proper extension. */
7317 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7318 && GET_CODE (op0) == SUBREG)
7319 SUBREG_PROMOTED_VAR_P (op0) = 0;
7320
7321 return REDUCE_BIT_FIELD (op0);
7322 }
7323
7324 op0 = expand_expr (treeop0, NULL_RTX, mode,
7325 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7326 if (GET_MODE (op0) == mode)
7327 ;
7328
7329 /* If OP0 is a constant, just convert it into the proper mode. */
7330 else if (CONSTANT_P (op0))
7331 {
7332 tree inner_type = TREE_TYPE (treeop0);
7333 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7334
7335 if (modifier == EXPAND_INITIALIZER)
7336 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7337 subreg_lowpart_offset (mode,
7338 inner_mode));
7339 else
7340 op0 = convert_modes (mode, inner_mode, op0,
7341 TYPE_UNSIGNED (inner_type));
7342 }
7343
7344 else if (modifier == EXPAND_INITIALIZER)
7345 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7346
7347 else if (target == 0)
7348 op0 = convert_to_mode (mode, op0,
7349 TYPE_UNSIGNED (TREE_TYPE
7350 (treeop0)));
7351 else
7352 {
7353 convert_move (target, op0,
7354 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7355 op0 = target;
7356 }
7357
7358 return REDUCE_BIT_FIELD (op0);
7359
7360 case ADDR_SPACE_CONVERT_EXPR:
7361 {
7362 tree treeop0_type = TREE_TYPE (treeop0);
7363 addr_space_t as_to;
7364 addr_space_t as_from;
7365
7366 gcc_assert (POINTER_TYPE_P (type));
7367 gcc_assert (POINTER_TYPE_P (treeop0_type));
7368
7369 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7370 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7371
7372 /* Conversions between pointers to the same address space should
7373 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7374 gcc_assert (as_to != as_from);
7375
7376 /* Ask target code to handle conversion between pointers
7377 to overlapping address spaces. */
7378 if (targetm.addr_space.subset_p (as_to, as_from)
7379 || targetm.addr_space.subset_p (as_from, as_to))
7380 {
7381 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7382 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7383 gcc_assert (op0);
7384 return op0;
7385 }
7386
7387 /* For disjoint address spaces, converting anything but
7388 a null pointer invokes undefined behaviour. We simply
7389 always return a null pointer here. */
7390 return CONST0_RTX (mode);
7391 }
7392
7393 case POINTER_PLUS_EXPR:
7394 /* Even though the sizetype mode and the pointer's mode can be different,
7395 expand is able to handle this correctly and get the correct result out
7396 of the PLUS_EXPR code. */
7397 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7398 if sizetype precision is smaller than pointer precision. */
7399 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7400 treeop1 = fold_convert_loc (loc, type,
7401 fold_convert_loc (loc, ssizetype,
7402 treeop1));
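/* POINTER_PLUS_EXPR then falls through to the PLUS_EXPR code below.
   Illustrative example: on a target where pointers are wider than
   sizetype, "p + n" arrives here with a sizetype-width N; the
   conversion above goes through ssizetype so that N is sign-extended
   to the pointer's precision before the addition is expanded.  */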
7403 case PLUS_EXPR:
7404
7405 /* Check if this is a case for multiplication and addition. */
7406 if ((TREE_CODE (type) == INTEGER_TYPE
7407 || TREE_CODE (type) == FIXED_POINT_TYPE)
7408 && (subexp0_def = get_def_for_expr (treeop0,
7409 MULT_EXPR)))
7410 {
7411 tree subsubexp0, subsubexp1;
7412 gimple subsubexp0_def, subsubexp1_def;
7413 enum tree_code this_code;
7414
7415 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7416 : FIXED_CONVERT_EXPR;
7417 subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7418 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7419 subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7420 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7421 if (subsubexp0_def && subsubexp1_def
7422 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7423 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7424 && (TYPE_PRECISION (TREE_TYPE (top0))
7425 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7426 && (TYPE_PRECISION (TREE_TYPE (top0))
7427 == TYPE_PRECISION (TREE_TYPE (top1)))
7428 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7429 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7430 {
7431 tree op0type = TREE_TYPE (top0);
7432 enum machine_mode innermode = TYPE_MODE (op0type);
7433 bool zextend_p = TYPE_UNSIGNED (op0type);
7434 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7435 if (sat_p == 0)
7436 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7437 else
7438 this_optab = zextend_p ? usmadd_widen_optab
7439 : ssmadd_widen_optab;
7440 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7441 && (optab_handler (this_optab, mode)->insn_code
7442 != CODE_FOR_nothing))
7443 {
7444 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7445 EXPAND_NORMAL);
7446 op2 = expand_expr (treeop1, subtarget,
7447 VOIDmode, EXPAND_NORMAL);
7448 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7449 target, unsignedp);
7450 gcc_assert (temp);
7451 return REDUCE_BIT_FIELD (temp);
7452 }
7453 }
7454 }
7455
7456 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7457 something else, make sure we add the register to the constant and
7458 then to the other thing. This case can occur during strength
7459 reduction and doing it this way will produce better code if the
7460 frame pointer or argument pointer is eliminated.
7461
7462 fold-const.c will ensure that the constant is always in the inner
7463 PLUS_EXPR, so the only case we need to do anything about is if
7464 sp, ap, or fp is our second argument, in which case we must swap
7465 the innermost first argument and our second argument. */
7466
7467 if (TREE_CODE (treeop0) == PLUS_EXPR
7468 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7469 && TREE_CODE (treeop1) == VAR_DECL
7470 && (DECL_RTL (treeop1) == frame_pointer_rtx
7471 || DECL_RTL (treeop1) == stack_pointer_rtx
7472 || DECL_RTL (treeop1) == arg_pointer_rtx))
7473 {
7474 tree t = treeop1;
7475
7476 treeop1 = TREE_OPERAND (treeop0, 0);
7477 TREE_OPERAND (treeop0, 0) = t;
7478 }
7479
7480 /* If the result is to be ptr_mode and we are adding an integer to
7481 something, we might be forming a constant. So try to use
7482 plus_constant. If it produces a sum and we can't accept it,
7483 use force_operand. This allows P = &ARR[const] to generate
7484 efficient code on machines where a SYMBOL_REF is not a valid
7485 address.
7486
7487 If this is an EXPAND_SUM call, always return the sum. */
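/* Illustrative example: for "p = &arr[10]" with 4-byte elements, the
   address is ideally formed with plus_constant as something like

	(plus (symbol_ref "arr") (const_int 40))

   and only forced through force_operand when the caller cannot
   accept such a sum.  */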
7488 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7489 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7490 {
7491 if (modifier == EXPAND_STACK_PARM)
7492 target = 0;
7493 if (TREE_CODE (treeop0) == INTEGER_CST
7494 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7495 && TREE_CONSTANT (treeop1))
7496 {
7497 rtx constant_part;
7498
7499 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7500 EXPAND_SUM);
7501 /* Use immed_double_const to ensure that the constant is
7502 truncated according to the mode of OP1, then sign extended
7503 to a HOST_WIDE_INT. Using the constant directly can result
7504 in non-canonical RTL in a 64x32 cross compile. */
7505 constant_part
7506 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7507 (HOST_WIDE_INT) 0,
7508 TYPE_MODE (TREE_TYPE (treeop1)));
7509 op1 = plus_constant (op1, INTVAL (constant_part));
7510 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7511 op1 = force_operand (op1, target);
7512 return REDUCE_BIT_FIELD (op1);
7513 }
7514
7515 else if (TREE_CODE (treeop1) == INTEGER_CST
7516 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7517 && TREE_CONSTANT (treeop0))
7518 {
7519 rtx constant_part;
7520
7521 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7522 (modifier == EXPAND_INITIALIZER
7523 ? EXPAND_INITIALIZER : EXPAND_SUM));
7524 if (! CONSTANT_P (op0))
7525 {
7526 op1 = expand_expr (treeop1, NULL_RTX,
7527 VOIDmode, modifier);
7528 /* Return a PLUS if modifier says it's OK. */
7529 if (modifier == EXPAND_SUM
7530 || modifier == EXPAND_INITIALIZER)
7531 return simplify_gen_binary (PLUS, mode, op0, op1);
7532 goto binop2;
7533 }
7534 /* Use immed_double_const to ensure that the constant is
7535 truncated according to the mode of OP0, then sign extended
7536 to a HOST_WIDE_INT. Using the constant directly can result
7537 in non-canonical RTL in a 64x32 cross compile. */
7538 constant_part
7539 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7540 (HOST_WIDE_INT) 0,
7541 TYPE_MODE (TREE_TYPE (treeop0)));
7542 op0 = plus_constant (op0, INTVAL (constant_part));
7543 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7544 op0 = force_operand (op0, target);
7545 return REDUCE_BIT_FIELD (op0);
7546 }
7547 }
7548
7549 /* No sense saving up arithmetic to be done
7550 if it's all in the wrong mode to form part of an address.
7551 And force_operand won't know whether to sign-extend or
7552 zero-extend. */
7553 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7554 || mode != ptr_mode)
7555 {
7556 expand_operands (treeop0, treeop1,
7557 subtarget, &op0, &op1, EXPAND_NORMAL);
7558 if (op0 == const0_rtx)
7559 return op1;
7560 if (op1 == const0_rtx)
7561 return op0;
7562 goto binop2;
7563 }
7564
7565 expand_operands (treeop0, treeop1,
7566 subtarget, &op0, &op1, modifier);
7567 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7568
7569 case MINUS_EXPR:
7570 /* Check if this is a case for multiplication and subtraction. */
7571 if ((TREE_CODE (type) == INTEGER_TYPE
7572 || TREE_CODE (type) == FIXED_POINT_TYPE)
7573 && (subexp1_def = get_def_for_expr (treeop1,
7574 MULT_EXPR)))
7575 {
7576 tree subsubexp0, subsubexp1;
7577 gimple subsubexp0_def, subsubexp1_def;
7578 enum tree_code this_code;
7579
7580 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7581 : FIXED_CONVERT_EXPR;
7582 subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7583 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7584 subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7585 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7586 if (subsubexp0_def && subsubexp1_def
7587 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7588 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7589 && (TYPE_PRECISION (TREE_TYPE (top0))
7590 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7591 && (TYPE_PRECISION (TREE_TYPE (top0))
7592 == TYPE_PRECISION (TREE_TYPE (top1)))
7593 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7594 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7595 {
7596 tree op0type = TREE_TYPE (top0);
7597 enum machine_mode innermode = TYPE_MODE (op0type);
7598 bool zextend_p = TYPE_UNSIGNED (op0type);
7599 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7600 if (sat_p == 0)
7601 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7602 else
7603 this_optab = zextend_p ? usmsub_widen_optab
7604 : ssmsub_widen_optab;
7605 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7606 && (optab_handler (this_optab, mode)->insn_code
7607 != CODE_FOR_nothing))
7608 {
7609 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7610 EXPAND_NORMAL);
7611 op2 = expand_expr (treeop0, subtarget,
7612 VOIDmode, EXPAND_NORMAL);
7613 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7614 target, unsignedp);
7615 gcc_assert (temp);
7616 return REDUCE_BIT_FIELD (temp);
7617 }
7618 }
7619 }
7620
7621 /* For initializers, we are allowed to return a MINUS of two
7622 symbolic constants; that is, the difference of two symbolic
7623 addresses may be emitted for the sake of an initializer.
7624 Here we handle all the cases in which both operands are
7625 constant. */
7626 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7627 && really_constant_p (treeop0)
7628 && really_constant_p (treeop1))
7629 {
7630 expand_operands (treeop0, treeop1,
7631 NULL_RTX, &op0, &op1, modifier);
7632
7633 /* If the last operand is a CONST_INT, use plus_constant of
7634 the negated constant. Else make the MINUS. */
7635 if (CONST_INT_P (op1))
7636 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7637 else
7638 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7639 }
7640
7641 /* No sense saving up arithmetic to be done
7642 if it's all in the wrong mode to form part of an address.
7643 And force_operand won't know whether to sign-extend or
7644 zero-extend. */
7645 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7646 || mode != ptr_mode)
7647 goto binop;
7648
7649 expand_operands (treeop0, treeop1,
7650 subtarget, &op0, &op1, modifier);
7651
7652 /* Convert A - const to A + (-const). */
7653 if (CONST_INT_P (op1))
7654 {
7655 op1 = negate_rtx (mode, op1);
7656 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7657 }
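/* For instance, "x - 4" in ptr_mode becomes "x + (-4)" here, keeping
   the canonical PLUS form that the address-generation code expects.  */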
7658
7659 goto binop2;
7660
7661 case MULT_EXPR:
7662 /* If this is a fixed-point operation, then we cannot use the code
7663 below because "expand_mult" doesn't support sat/no-sat fixed-point
7664 multiplications. */
7665 if (ALL_FIXED_POINT_MODE_P (mode))
7666 goto binop;
7667
7668 /* If first operand is constant, swap them.
7669 Thus the following special case checks need only
7670 check the second operand. */
7671 if (TREE_CODE (treeop0) == INTEGER_CST)
7672 {
7673 tree t1 = treeop0;
7674 treeop0 = treeop1;
7675 treeop1 = t1;
7676 }
7677
7678 /* Attempt to return something suitable for generating an
7679 indexed address, for machines that support that. */
7680
7681 if (modifier == EXPAND_SUM && mode == ptr_mode
7682 && host_integerp (treeop1, 0))
7683 {
7684 tree exp1 = treeop1;
7685
7686 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7687 EXPAND_SUM);
7688
7689 if (!REG_P (op0))
7690 op0 = force_operand (op0, NULL_RTX);
7691 if (!REG_P (op0))
7692 op0 = copy_to_mode_reg (mode, op0);
7693
7694 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7695 gen_int_mode (tree_low_cst (exp1, 0),
7696 TYPE_MODE (TREE_TYPE (exp1)))));
7697 }
7698
7699 if (modifier == EXPAND_STACK_PARM)
7700 target = 0;
7701
7702 /* Check for multiplying things that have been extended
7703 from a narrower type. If this machine supports multiplying
7704 in that narrower type with a result in the desired type,
7705 do it that way, and avoid the explicit type-conversion. */
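/* Illustrative example: with 16-bit shorts and 32-bit ints,

	int prod = (int) s0 * (int) s1;

   can use a widening multiply pattern (smul_widen_optab,
   umul_widen_optab or usmul_widen_optab below) that takes the narrow
   operands directly and produces the wide product, instead of
   extending both operands first.  */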
7706
7707 subexp0 = treeop0;
7708 subexp1 = treeop1;
7709 subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
7710 subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
7711 top0 = top1 = NULL_TREE;
7712
7713 /* First, check if we have a multiplication of one signed and one
7714 unsigned operand. */
7715 if (subexp0_def
7716 && (top0 = gimple_assign_rhs1 (subexp0_def))
7717 && subexp1_def
7718 && (top1 = gimple_assign_rhs1 (subexp1_def))
7719 && TREE_CODE (type) == INTEGER_TYPE
7720 && (TYPE_PRECISION (TREE_TYPE (top0))
7721 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7722 && (TYPE_PRECISION (TREE_TYPE (top0))
7723 == TYPE_PRECISION (TREE_TYPE (top1)))
7724 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7725 != TYPE_UNSIGNED (TREE_TYPE (top1))))
7726 {
7727 enum machine_mode innermode
7728 = TYPE_MODE (TREE_TYPE (top0));
7729 this_optab = usmul_widen_optab;
7730 if (mode == GET_MODE_WIDER_MODE (innermode))
7731 {
7732 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7733 {
7734 if (TYPE_UNSIGNED (TREE_TYPE (top0)))
7735 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7736 EXPAND_NORMAL);
7737 else
7738 expand_operands (top0, top1, NULL_RTX, &op1, &op0,
7739 EXPAND_NORMAL);
7740
7741 goto binop3;
7742 }
7743 }
7744 }
7745 /* Check for a multiplication with matching signedness. If
7746 valid, TOP0 and TOP1 were set in the previous if
7747 condition. */
7748 else if (top0
7749 && TREE_CODE (type) == INTEGER_TYPE
7750 && (TYPE_PRECISION (TREE_TYPE (top0))
7751 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7752 && ((TREE_CODE (subexp1) == INTEGER_CST
7753 && int_fits_type_p (subexp1, TREE_TYPE (top0))
7754 /* Don't use a widening multiply if a shift will do. */
7755 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
7756 > HOST_BITS_PER_WIDE_INT)
7757 || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
7758 ||
7759 (top1
7760 && (TYPE_PRECISION (TREE_TYPE (top1))
7761 == TYPE_PRECISION (TREE_TYPE (top0))
7762 /* If both operands are extended, they must either both
7763 be zero-extended or both be sign-extended. */
7764 && (TYPE_UNSIGNED (TREE_TYPE (top1))
7765 == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
7766 {
7767 tree op0type = TREE_TYPE (top0);
7768 enum machine_mode innermode = TYPE_MODE (op0type);
7769 bool zextend_p = TYPE_UNSIGNED (op0type);
7770 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7771 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7772
7773 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7774 {
7775 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7776 {
7777 if (TREE_CODE (subexp1) == INTEGER_CST)
7778 expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
7779 EXPAND_NORMAL);
7780 else
7781 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7782 EXPAND_NORMAL);
7783 goto binop3;
7784 }
7785 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7786 && innermode == word_mode)
7787 {
7788 rtx htem, hipart;
7789 op0 = expand_normal (top0);
7790 if (TREE_CODE (subexp1) == INTEGER_CST)
7791 op1 = convert_modes (innermode, mode,
7792 expand_normal (subexp1), unsignedp);
7793 else
7794 op1 = expand_normal (top1);
7795 temp = expand_binop (mode, other_optab, op0, op1, target,
7796 unsignedp, OPTAB_LIB_WIDEN);
7797 hipart = gen_highpart (innermode, temp);
7798 htem = expand_mult_highpart_adjust (innermode, hipart,
7799 op0, op1, hipart,
7800 zextend_p);
7801 if (htem != hipart)
7802 emit_move_insn (hipart, htem);
7803 return REDUCE_BIT_FIELD (temp);
7804 }
7805 }
7806 }
7807 expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
7808 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7809
7810 case TRUNC_DIV_EXPR:
7811 case FLOOR_DIV_EXPR:
7812 case CEIL_DIV_EXPR:
7813 case ROUND_DIV_EXPR:
7814 case EXACT_DIV_EXPR:
7815 /* If this is a fixed-point operation, then we cannot use the code
7816 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7817 divisions. */
7818 if (ALL_FIXED_POINT_MODE_P (mode))
7819 goto binop;
7820
7821 if (modifier == EXPAND_STACK_PARM)
7822 target = 0;
7823 /* Possible optimization: compute the dividend with EXPAND_SUM
7824 then if the divisor is constant can optimize the case
7825 where some terms of the dividend have coeffs divisible by it. */
7826 expand_operands (treeop0, treeop1,
7827 subtarget, &op0, &op1, EXPAND_NORMAL);
7828 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7829
7830 case RDIV_EXPR:
7831 goto binop;
7832
7833 case TRUNC_MOD_EXPR:
7834 case FLOOR_MOD_EXPR:
7835 case CEIL_MOD_EXPR:
7836 case ROUND_MOD_EXPR:
7837 if (modifier == EXPAND_STACK_PARM)
7838 target = 0;
7839 expand_operands (treeop0, treeop1,
7840 subtarget, &op0, &op1, EXPAND_NORMAL);
7841 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7842
7843 case FIXED_CONVERT_EXPR:
7844 op0 = expand_normal (treeop0);
7845 if (target == 0 || modifier == EXPAND_STACK_PARM)
7846 target = gen_reg_rtx (mode);
7847
7848 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7849 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7850 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7851 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7852 else
7853 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7854 return target;
7855
7856 case FIX_TRUNC_EXPR:
7857 op0 = expand_normal (treeop0);
7858 if (target == 0 || modifier == EXPAND_STACK_PARM)
7859 target = gen_reg_rtx (mode);
7860 expand_fix (target, op0, unsignedp);
7861 return target;
7862
7863 case FLOAT_EXPR:
7864 op0 = expand_normal (treeop0);
7865 if (target == 0 || modifier == EXPAND_STACK_PARM)
7866 target = gen_reg_rtx (mode);
7867 /* expand_float can't figure out what to do if FROM has VOIDmode.
7868 So give it the correct mode. With -O, cse will optimize this. */
7869 if (GET_MODE (op0) == VOIDmode)
7870 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7871 op0);
7872 expand_float (target, op0,
7873 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7874 return target;
7875
7876 case NEGATE_EXPR:
7877 op0 = expand_expr (treeop0, subtarget,
7878 VOIDmode, EXPAND_NORMAL);
7879 if (modifier == EXPAND_STACK_PARM)
7880 target = 0;
7881 temp = expand_unop (mode,
7882 optab_for_tree_code (NEGATE_EXPR, type,
7883 optab_default),
7884 op0, target, 0);
7885 gcc_assert (temp);
7886 return REDUCE_BIT_FIELD (temp);
7887
7888 case ABS_EXPR:
7889 op0 = expand_expr (treeop0, subtarget,
7890 VOIDmode, EXPAND_NORMAL);
7891 if (modifier == EXPAND_STACK_PARM)
7892 target = 0;
7893
7894 /* ABS_EXPR is not valid for complex arguments. */
7895 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7896 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7897
7898 /* Unsigned abs is simply the operand. Testing here means we don't
7899 risk generating incorrect code below. */
7900 if (TYPE_UNSIGNED (type))
7901 return op0;
7902
7903 return expand_abs (mode, op0, target, unsignedp,
7904 safe_from_p (target, treeop0, 1));
7905
7906 case MAX_EXPR:
7907 case MIN_EXPR:
7908 target = original_target;
7909 if (target == 0
7910 || modifier == EXPAND_STACK_PARM
7911 || (MEM_P (target) && MEM_VOLATILE_P (target))
7912 || GET_MODE (target) != mode
7913 || (REG_P (target)
7914 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7915 target = gen_reg_rtx (mode);
7916 expand_operands (treeop0, treeop1,
7917 target, &op0, &op1, EXPAND_NORMAL);
7918
7919 /* First try to do it with a special MIN or MAX instruction.
7920 If that does not win, use a conditional jump to select the proper
7921 value. */
7922 this_optab = optab_for_tree_code (code, type, optab_default);
7923 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7924 OPTAB_WIDEN);
7925 if (temp != 0)
7926 return temp;
7927
7928 /* At this point, a MEM target is no longer useful; we will get better
7929 code without it. */
7930
7931 if (! REG_P (target))
7932 target = gen_reg_rtx (mode);
7933
7934 /* If op1 was placed in target, swap op0 and op1. */
7935 if (target != op0 && target == op1)
7936 {
7937 temp = op0;
7938 op0 = op1;
7939 op1 = temp;
7940 }
7941
7942 /* We generate better code and avoid problems with op1 mentioning
7943 target by forcing op1 into a pseudo if it isn't a constant. */
7944 if (! CONSTANT_P (op1))
7945 op1 = force_reg (mode, op1);
7946
7947 {
7948 enum rtx_code comparison_code;
7949 rtx cmpop1 = op1;
7950
7951 if (code == MAX_EXPR)
7952 comparison_code = unsignedp ? GEU : GE;
7953 else
7954 comparison_code = unsignedp ? LEU : LE;
7955
7956 /* Canonicalize to comparisons against 0. */
7957 if (op1 == const1_rtx)
7958 {
7959 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7960 or (a != 0 ? a : 1) for unsigned.
7961 For MIN we are safe converting (a <= 1 ? a : 1)
7962 into (a <= 0 ? a : 1) */
7963 cmpop1 = const0_rtx;
7964 if (code == MAX_EXPR)
7965 comparison_code = unsignedp ? NE : GT;
7966 }
7967 if (op1 == constm1_rtx && !unsignedp)
7968 {
7969 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7970 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7971 cmpop1 = const0_rtx;
7972 if (code == MIN_EXPR)
7973 comparison_code = LT;
7974 }
7975 #ifdef HAVE_conditional_move
7976 /* Use a conditional move if possible. */
7977 if (can_conditionally_move_p (mode))
7978 {
7979 rtx insn;
7980
7981 /* ??? Same problem as in expmed.c: emit_conditional_move
7982 forces a stack adjustment via compare_from_rtx, and we
7983 lose the stack adjustment if the sequence we are about
7984 to create is discarded. */
7985 do_pending_stack_adjust ();
7986
7987 start_sequence ();
7988
7989 /* Try to emit the conditional move. */
7990 insn = emit_conditional_move (target, comparison_code,
7991 op0, cmpop1, mode,
7992 op0, op1, mode,
7993 unsignedp);
7994
7995 /* If we could do the conditional move, emit the sequence,
7996 and return. */
7997 if (insn)
7998 {
7999 rtx seq = get_insns ();
8000 end_sequence ();
8001 emit_insn (seq);
8002 return target;
8003 }
8004
8005 /* Otherwise discard the sequence and fall back to code with
8006 branches. */
8007 end_sequence ();
8008 }
8009 #endif
8010 if (target != op0)
8011 emit_move_insn (target, op0);
8012
8013 temp = gen_label_rtx ();
8014 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8015 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8016 }
8017 emit_move_insn (target, op1);
8018 emit_label (temp);
8019 return target;
8020
8021 case BIT_NOT_EXPR:
8022 op0 = expand_expr (treeop0, subtarget,
8023 VOIDmode, EXPAND_NORMAL);
8024 if (modifier == EXPAND_STACK_PARM)
8025 target = 0;
8026 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8027 gcc_assert (temp);
8028 return temp;
8029
8030 /* ??? Can optimize bitwise operations with one arg constant.
8031 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8032 and (a bitwise1 b) bitwise2 b (etc)
8033 but that is probably not worthwhile. */
8034
8035 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8036 boolean values when we want in all cases to compute both of them. In
8037 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8038 as actual zero-or-1 values and then bitwise anding. In cases where
8039 there cannot be any side effects, better code would be made by
8040 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8041 how to recognize those cases. */
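/* In other words, a TRUTH_AND_EXPR whose operands are already plain
   0-or-1 values is expanded exactly like BIT_AND_EXPR; the case labels
   below just rewrite CODE and share the generic binop path.  */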
8042
8043 case TRUTH_AND_EXPR:
8044 code = BIT_AND_EXPR;
8045 case BIT_AND_EXPR:
8046 goto binop;
8047
8048 case TRUTH_OR_EXPR:
8049 code = BIT_IOR_EXPR;
8050 case BIT_IOR_EXPR:
8051 goto binop;
8052
8053 case TRUTH_XOR_EXPR:
8054 code = BIT_XOR_EXPR;
8055 case BIT_XOR_EXPR:
8056 goto binop;
8057
8058 case LROTATE_EXPR:
8059 case RROTATE_EXPR:
8060 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8061 || (GET_MODE_PRECISION (TYPE_MODE (type))
8062 == TYPE_PRECISION (type)));
8063 /* fall through */
8064
8065 case LSHIFT_EXPR:
8066 case RSHIFT_EXPR:
8067 /* If this is a fixed-point operation, then we cannot use the code
8068 below because "expand_shift" doesn't support sat/no-sat fixed-point
8069 shifts. */
8070 if (ALL_FIXED_POINT_MODE_P (mode))
8071 goto binop;
8072
8073 if (! safe_from_p (subtarget, treeop1, 1))
8074 subtarget = 0;
8075 if (modifier == EXPAND_STACK_PARM)
8076 target = 0;
8077 op0 = expand_expr (treeop0, subtarget,
8078 VOIDmode, EXPAND_NORMAL);
8079 temp = expand_shift (code, mode, op0, treeop1, target,
8080 unsignedp);
8081 if (code == LSHIFT_EXPR)
8082 temp = REDUCE_BIT_FIELD (temp);
8083 return temp;
8084
8085 /* Could determine the answer when only additive constants differ. Also,
8086 the addition of one can be handled by changing the condition. */
8087 case LT_EXPR:
8088 case LE_EXPR:
8089 case GT_EXPR:
8090 case GE_EXPR:
8091 case EQ_EXPR:
8092 case NE_EXPR:
8093 case UNORDERED_EXPR:
8094 case ORDERED_EXPR:
8095 case UNLT_EXPR:
8096 case UNLE_EXPR:
8097 case UNGT_EXPR:
8098 case UNGE_EXPR:
8099 case UNEQ_EXPR:
8100 case LTGT_EXPR:
8101 temp = do_store_flag (ops,
8102 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8103 tmode != VOIDmode ? tmode : mode);
8104 if (temp)
8105 return temp;
8106
8107 /* Use a compare and a jump for BLKmode comparisons, or for function
8108 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8109
8110 if ((target == 0
8111 || modifier == EXPAND_STACK_PARM
8112 || ! safe_from_p (target, treeop0, 1)
8113 || ! safe_from_p (target, treeop1, 1)
8114 /* Make sure we don't have a hard reg (such as function's return
8115 value) live across basic blocks, if not optimizing. */
8116 || (!optimize && REG_P (target)
8117 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8118 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8119
8120 emit_move_insn (target, const0_rtx);
8121
8122 op1 = gen_label_rtx ();
8123 jumpifnot_1 (code, treeop0, treeop1, op1);
8124
8125 emit_move_insn (target, const1_rtx);
8126
8127 emit_label (op1);
8128 return target;
8129
8130 case TRUTH_NOT_EXPR:
8131 if (modifier == EXPAND_STACK_PARM)
8132 target = 0;
8133 op0 = expand_expr (treeop0, target,
8134 VOIDmode, EXPAND_NORMAL);
8135 /* The parser is careful to generate TRUTH_NOT_EXPR
8136 only with operands that are always zero or one. */
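/* So logical negation of a 0-or-1 value reduces to "op0 ^ 1", which
   is what the xor_optab expansion below computes.  */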
8137 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8138 target, 1, OPTAB_LIB_WIDEN);
8139 gcc_assert (temp);
8140 return temp;
8141
8142 case COMPLEX_EXPR:
8143 /* Get the rtx code of the operands. */
8144 op0 = expand_normal (treeop0);
8145 op1 = expand_normal (treeop1);
8146
8147 if (!target)
8148 target = gen_reg_rtx (TYPE_MODE (type));
8149
8150 /* Move the real (op0) and imaginary (op1) parts to their location. */
8151 write_complex_part (target, op0, false);
8152 write_complex_part (target, op1, true);
8153
8154 return target;
8155
8156 case WIDEN_SUM_EXPR:
8157 {
8158 tree oprnd0 = treeop0;
8159 tree oprnd1 = treeop1;
8160
8161 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8162 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8163 target, unsignedp);
8164 return target;
8165 }
8166
8167 case REDUC_MAX_EXPR:
8168 case REDUC_MIN_EXPR:
8169 case REDUC_PLUS_EXPR:
8170 {
8171 op0 = expand_normal (treeop0);
8172 this_optab = optab_for_tree_code (code, type, optab_default);
8173 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8174 gcc_assert (temp);
8175 return temp;
8176 }
8177
8178 case VEC_EXTRACT_EVEN_EXPR:
8179 case VEC_EXTRACT_ODD_EXPR:
8180 {
8181 expand_operands (treeop0, treeop1,
8182 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8183 this_optab = optab_for_tree_code (code, type, optab_default);
8184 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8185 OPTAB_WIDEN);
8186 gcc_assert (temp);
8187 return temp;
8188 }
8189
8190 case VEC_INTERLEAVE_HIGH_EXPR:
8191 case VEC_INTERLEAVE_LOW_EXPR:
8192 {
8193 expand_operands (treeop0, treeop1,
8194 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8195 this_optab = optab_for_tree_code (code, type, optab_default);
8196 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8197 OPTAB_WIDEN);
8198 gcc_assert (temp);
8199 return temp;
8200 }
8201
8202 case VEC_LSHIFT_EXPR:
8203 case VEC_RSHIFT_EXPR:
8204 {
8205 target = expand_vec_shift_expr (ops, target);
8206 return target;
8207 }
8208
8209 case VEC_UNPACK_HI_EXPR:
8210 case VEC_UNPACK_LO_EXPR:
8211 {
8212 op0 = expand_normal (treeop0);
8213 this_optab = optab_for_tree_code (code, type, optab_default);
8214 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8215 target, unsignedp);
8216 gcc_assert (temp);
8217 return temp;
8218 }
8219
8220 case VEC_UNPACK_FLOAT_HI_EXPR:
8221 case VEC_UNPACK_FLOAT_LO_EXPR:
8222 {
8223 op0 = expand_normal (treeop0);
8224 /* The signedness is determined from input operand. */
8225 this_optab = optab_for_tree_code (code,
8226 TREE_TYPE (treeop0),
8227 optab_default);
8228 temp = expand_widen_pattern_expr
8229 (ops, op0, NULL_RTX, NULL_RTX,
8230 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8231
8232 gcc_assert (temp);
8233 return temp;
8234 }
8235
8236 case VEC_WIDEN_MULT_HI_EXPR:
8237 case VEC_WIDEN_MULT_LO_EXPR:
8238 {
8239 tree oprnd0 = treeop0;
8240 tree oprnd1 = treeop1;
8241
8242 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8243 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8244 target, unsignedp);
8245 gcc_assert (target);
8246 return target;
8247 }
8248
8249 case VEC_PACK_TRUNC_EXPR:
8250 case VEC_PACK_SAT_EXPR:
8251 case VEC_PACK_FIX_TRUNC_EXPR:
8252 mode = TYPE_MODE (TREE_TYPE (treeop0));
8253 goto binop;
8254
8255 default:
8256 gcc_unreachable ();
8257 }
8258
8259 /* Here to do an ordinary binary operator. */
8260 binop:
8261 expand_operands (treeop0, treeop1,
8262 subtarget, &op0, &op1, EXPAND_NORMAL);
8263 binop2:
8264 this_optab = optab_for_tree_code (code, type, optab_default);
8265 binop3:
8266 if (modifier == EXPAND_STACK_PARM)
8267 target = 0;
8268 temp = expand_binop (mode, this_optab, op0, op1, target,
8269 unsignedp, OPTAB_LIB_WIDEN);
8270 gcc_assert (temp);
8271 return REDUCE_BIT_FIELD (temp);
8272 }
8273 #undef REDUCE_BIT_FIELD
8274
8275 rtx
8276 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8277 enum expand_modifier modifier, rtx *alt_rtl)
8278 {
8279 rtx op0, op1, temp, decl_rtl;
8280 tree type;
8281 int unsignedp;
8282 enum machine_mode mode;
8283 enum tree_code code = TREE_CODE (exp);
8284 optab this_optab;
8285 rtx subtarget, original_target;
8286 int ignore;
8287 tree context;
8288 bool reduce_bit_field;
8289 location_t loc = EXPR_LOCATION (exp);
8290 struct separate_ops ops;
8291 tree treeop0, treeop1, treeop2;
8292
8293 type = TREE_TYPE (exp);
8294 mode = TYPE_MODE (type);
8295 unsignedp = TYPE_UNSIGNED (type);
8296
8297 treeop0 = treeop1 = treeop2 = NULL_TREE;
8298 if (!VL_EXP_CLASS_P (exp))
8299 switch (TREE_CODE_LENGTH (code))
8300 {
8301 default:
8302 case 3: treeop2 = TREE_OPERAND (exp, 2);
8303 case 2: treeop1 = TREE_OPERAND (exp, 1);
8304 case 1: treeop0 = TREE_OPERAND (exp, 0);
8305 case 0: break;
8306 }
8307 ops.code = code;
8308 ops.type = type;
8309 ops.op0 = treeop0;
8310 ops.op1 = treeop1;
8311 ops.op2 = treeop2;
8312 ops.location = loc;
8313
8314 ignore = (target == const0_rtx
8315 || ((CONVERT_EXPR_CODE_P (code)
8316 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8317 && TREE_CODE (type) == VOID_TYPE));
8318
8319 /* An operation in what may be a bit-field type needs the
8320 result to be reduced to the precision of the bit-field type,
8321 which is narrower than that of the type's mode. */
8322 reduce_bit_field = (!ignore
8323 && TREE_CODE (type) == INTEGER_TYPE
8324 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8325
8326 /* If we are going to ignore this result, we need only do something
8327 if there is a side-effect somewhere in the expression. If there
8328 is, short-circuit the most common cases here. Note that we must
8329 not call expand_expr with anything but const0_rtx in case this
8330 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8331
8332 if (ignore)
8333 {
8334 if (! TREE_SIDE_EFFECTS (exp))
8335 return const0_rtx;
8336
8337 /* Ensure we reference a volatile object even if value is ignored, but
8338 don't do this if all we are doing is taking its address. */
8339 if (TREE_THIS_VOLATILE (exp)
8340 && TREE_CODE (exp) != FUNCTION_DECL
8341 && mode != VOIDmode && mode != BLKmode
8342 && modifier != EXPAND_CONST_ADDRESS)
8343 {
8344 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8345 if (MEM_P (temp))
8346 temp = copy_to_reg (temp);
8347 return const0_rtx;
8348 }
8349
8350 if (TREE_CODE_CLASS (code) == tcc_unary
8351 || code == COMPONENT_REF || code == INDIRECT_REF)
8352 return expand_expr (treeop0, const0_rtx, VOIDmode,
8353 modifier);
8354
8355 else if (TREE_CODE_CLASS (code) == tcc_binary
8356 || TREE_CODE_CLASS (code) == tcc_comparison
8357 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8358 {
8359 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8360 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8361 return const0_rtx;
8362 }
8363 else if (code == BIT_FIELD_REF)
8364 {
8365 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8366 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8367 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8368 return const0_rtx;
8369 }
8370
8371 target = 0;
8372 }
8373
8374 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8375 target = 0;
8376
8377 /* Use subtarget as the target for operand 0 of a binary operation. */
8378 subtarget = get_subtarget (target);
8379 original_target = target;
8380
8381 switch (code)
8382 {
8383 case LABEL_DECL:
8384 {
8385 tree function = decl_function_context (exp);
8386
8387 temp = label_rtx (exp);
8388 temp = gen_rtx_LABEL_REF (Pmode, temp);
8389
8390 if (function != current_function_decl
8391 && function != 0)
8392 LABEL_REF_NONLOCAL_P (temp) = 1;
8393
8394 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8395 return temp;
8396 }
8397
8398 case SSA_NAME:
8399 /* ??? ivopts calls the expander, without any preparation from
8400 out-of-ssa. So fake instructions as if this were an access to the
8401 base variable. This unnecessarily allocates a pseudo; see how we can
8402 reuse it if partition base vars have it set already. */
8403 if (!currently_expanding_to_rtl)
8404 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8405 {
8406 gimple g = get_gimple_for_ssa_name (exp);
8407 if (g)
8408 return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
8409 tmode, modifier, NULL);
8410 }
8411 decl_rtl = get_rtx_for_ssa_name (exp);
8412 exp = SSA_NAME_VAR (exp);
8413 goto expand_decl_rtl;
8414
8415 case PARM_DECL:
8416 case VAR_DECL:
8417 /* If a static var's type was incomplete when the decl was written,
8418 but the type is complete now, lay out the decl now. */
8419 if (DECL_SIZE (exp) == 0
8420 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8421 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8422 layout_decl (exp, 0);
8423
8424 /* TLS emulation hook - replace __thread vars with
8425 *__emutls_get_address (&_emutls.var). */
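/* Roughly: an access to "__thread int t;" on a target without native
   TLS support is rewritten as an indirection through the address
   returned by __emutls_get_address for t's control variable, and the
   resulting expression is re-expanded.  */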
8426 if (! targetm.have_tls
8427 && TREE_CODE (exp) == VAR_DECL
8428 && DECL_THREAD_LOCAL_P (exp))
8429 {
8430 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8431 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8432 }
8433
8434 /* ... fall through ... */
8435
8436 case FUNCTION_DECL:
8437 case RESULT_DECL:
8438 decl_rtl = DECL_RTL (exp);
8439 expand_decl_rtl:
8440 gcc_assert (decl_rtl);
8441 decl_rtl = copy_rtx (decl_rtl);
8442
8443 /* Ensure the variable is marked as used even if it doesn't go
8444 through a parser. If it hasn't been used yet, write out an
8445 external definition. */
8446 if (! TREE_USED (exp))
8447 {
8448 assemble_external (exp);
8449 TREE_USED (exp) = 1;
8450 }
8451
8452 /* Show we haven't gotten RTL for this yet. */
8453 temp = 0;
8454
8455 /* Variables inherited from containing functions should have
8456 been lowered by this point. */
8457 context = decl_function_context (exp);
8458 gcc_assert (!context
8459 || context == current_function_decl
8460 || TREE_STATIC (exp)
8461 /* ??? C++ creates functions that are not TREE_STATIC. */
8462 || TREE_CODE (exp) == FUNCTION_DECL);
8463
8464 /* This is the case of an array whose size is to be determined
8465 from its initializer, while the initializer is still being parsed.
8466 See expand_decl. */
8467
8468 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8469 temp = validize_mem (decl_rtl);
8470
8471 /* If DECL_RTL is memory, we are in the normal case; if the
8472 address is not valid, get the address into a register. */
8473
8474 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8475 {
8476 if (alt_rtl)
8477 *alt_rtl = decl_rtl;
8478 decl_rtl = use_anchored_address (decl_rtl);
8479 if (modifier != EXPAND_CONST_ADDRESS
8480 && modifier != EXPAND_SUM
8481 && !memory_address_addr_space_p (DECL_MODE (exp),
8482 XEXP (decl_rtl, 0),
8483 MEM_ADDR_SPACE (decl_rtl)))
8484 temp = replace_equiv_address (decl_rtl,
8485 copy_rtx (XEXP (decl_rtl, 0)));
8486 }
8487
8488 /* If we got something, return it. But first, set the alignment
8489 if the address is a register. */
8490 if (temp != 0)
8491 {
8492 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8493 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8494
8495 return temp;
8496 }
8497
8498 /* If the mode of DECL_RTL does not match that of the decl, it
8499 must be a promoted value. We return a SUBREG of the wanted mode,
8500 but mark it so that we know that it was already extended. */
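/* Illustrative example: a "short" variable that was promoted to a
   word-mode pseudo is returned here as something like

	(subreg:HI (reg:SI ...) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the value has
   already been sign- or zero-extended.  */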
8501
8502 if (REG_P (decl_rtl)
8503 && GET_MODE (decl_rtl) != DECL_MODE (exp))
8504 {
8505 enum machine_mode pmode;
8506
8507 /* Get the signedness used for this variable. Ensure we get the
8508 same mode we got when the variable was declared. */
8509 pmode = promote_decl_mode (exp, &unsignedp);
8510 gcc_assert (GET_MODE (decl_rtl) == pmode);
8511
8512 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8513 SUBREG_PROMOTED_VAR_P (temp) = 1;
8514 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8515 return temp;
8516 }
8517
8518 return decl_rtl;
8519
8520 case INTEGER_CST:
8521 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8522 TREE_INT_CST_HIGH (exp), mode);
8523
8524 return temp;
8525
8526 case VECTOR_CST:
8527 {
8528 tree tmp = NULL_TREE;
8529 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8530 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8531 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8532 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8533 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8534 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8535 return const_vector_from_tree (exp);
8536 if (GET_MODE_CLASS (mode) == MODE_INT)
8537 {
8538 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8539 if (type_for_mode)
8540 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8541 }
8542 if (!tmp)
8543 tmp = build_constructor_from_list (type,
8544 TREE_VECTOR_CST_ELTS (exp));
8545 return expand_expr (tmp, ignore ? const0_rtx : target,
8546 tmode, modifier);
8547 }
8548
8549 case CONST_DECL:
8550 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8551
8552 case REAL_CST:
8553 /* If optimized, generate immediate CONST_DOUBLE
8554 which will be turned into memory by reload if necessary.
8555
8556 We used to force a register so that loop.c could see it. But
8557 this does not allow gen_* patterns to perform optimizations with
8558 the constants. It also produces two insns in cases like "x = 1.0;".
8559 On most machines, floating-point constants are not permitted in
8560 many insns, so we'd end up copying it to a register in any case.
8561
8562 Now, we do the copying in expand_binop, if appropriate. */
8563 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8564 TYPE_MODE (TREE_TYPE (exp)));
8565
8566 case FIXED_CST:
8567 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8568 TYPE_MODE (TREE_TYPE (exp)));
8569
8570 case COMPLEX_CST:
8571 /* Handle evaluating a complex constant in a CONCAT target. */
8572 if (original_target && GET_CODE (original_target) == CONCAT)
8573 {
8574 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8575 rtx rtarg, itarg;
8576
8577 rtarg = XEXP (original_target, 0);
8578 itarg = XEXP (original_target, 1);
8579
8580 /* Move the real and imaginary parts separately. */
8581 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8582 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8583
8584 if (op0 != rtarg)
8585 emit_move_insn (rtarg, op0);
8586 if (op1 != itarg)
8587 emit_move_insn (itarg, op1);
8588
8589 return original_target;
8590 }
8591
8592 /* ... fall through ... */
8593
8594 case STRING_CST:
8595 temp = expand_expr_constant (exp, 1, modifier);
8596
8597 /* temp contains a constant address.
8598 On RISC machines where a constant address isn't valid,
8599 make some insns to get that address into a register. */
8600 if (modifier != EXPAND_CONST_ADDRESS
8601 && modifier != EXPAND_INITIALIZER
8602 && modifier != EXPAND_SUM
8603 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8604 MEM_ADDR_SPACE (temp)))
8605 return replace_equiv_address (temp,
8606 copy_rtx (XEXP (temp, 0)));
8607 return temp;
8608
8609 case SAVE_EXPR:
8610 {
8611 tree val = treeop0;
8612 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8613
8614 if (!SAVE_EXPR_RESOLVED_P (exp))
8615 {
8616 /* We can indeed still hit this case, typically via builtin
8617 expanders calling save_expr immediately before expanding
8618 something. Assume this means that we only have to deal
8619 with non-BLKmode values. */
8620 gcc_assert (GET_MODE (ret) != BLKmode);
8621
8622 val = build_decl (EXPR_LOCATION (exp),
8623 VAR_DECL, NULL, TREE_TYPE (exp));
8624 DECL_ARTIFICIAL (val) = 1;
8625 DECL_IGNORED_P (val) = 1;
8626 treeop0 = val;
8627 TREE_OPERAND (exp, 0) = treeop0;
8628 SAVE_EXPR_RESOLVED_P (exp) = 1;
8629
8630 if (!CONSTANT_P (ret))
8631 ret = copy_to_reg (ret);
8632 SET_DECL_RTL (val, ret);
8633 }
8634
8635 return ret;
8636 }
8637
8638
8639 case CONSTRUCTOR:
8640 /* If we don't need the result, just ensure we evaluate any
8641 subexpressions. */
8642 if (ignore)
8643 {
8644 unsigned HOST_WIDE_INT idx;
8645 tree value;
8646
8647 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8648 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8649
8650 return const0_rtx;
8651 }
8652
8653 return expand_constructor (exp, target, modifier, false);
8654
8655 case MISALIGNED_INDIRECT_REF:
8656 case ALIGN_INDIRECT_REF:
8657 case INDIRECT_REF:
8658 {
8659 tree exp1 = treeop0;
8660 addr_space_t as = ADDR_SPACE_GENERIC;
8661 enum machine_mode address_mode = Pmode;
8662
8663 if (modifier != EXPAND_WRITE)
8664 {
8665 tree t;
8666
8667 t = fold_read_from_constant_string (exp);
8668 if (t)
8669 return expand_expr (t, target, tmode, modifier);
8670 }
8671
8672 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8673 {
8674 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8675 address_mode = targetm.addr_space.address_mode (as);
8676 }
8677
8678 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8679 op0 = memory_address_addr_space (mode, op0, as);
8680
8681 if (code == ALIGN_INDIRECT_REF)
8682 {
8683 int align = TYPE_ALIGN_UNIT (type);
8684 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8685 op0 = memory_address_addr_space (mode, op0, as);
8686 }
8687
8688 temp = gen_rtx_MEM (mode, op0);
8689
8690 set_mem_attributes (temp, exp, 0);
8691 set_mem_addr_space (temp, as);
8692
8693 /* Resolve the misalignment now, so that we don't have to remember
8694 to resolve it later. Of course, this only works for reads. */
8695 if (code == MISALIGNED_INDIRECT_REF)
8696 {
8697 int icode;
8698 rtx reg, insn;
8699
8700 gcc_assert (modifier == EXPAND_NORMAL
8701 || modifier == EXPAND_STACK_PARM);
8702
8703 /* The vectorizer should have already checked the mode. */
8704 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8705 gcc_assert (icode != CODE_FOR_nothing);
8706
8707 /* We've already validated the memory, and we're creating a
8708 new pseudo destination. The predicates really can't fail. */
8709 reg = gen_reg_rtx (mode);
8710
8711 /* Nor can the insn generator. */
8712 insn = GEN_FCN (icode) (reg, temp);
8713 emit_insn (insn);
8714
8715 return reg;
8716 }
8717
8718 return temp;
8719 }
8720
8721 case TARGET_MEM_REF:
8722 {
8723 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8724 struct mem_address addr;
8725
8726 get_address_description (exp, &addr);
8727 op0 = addr_for_mem_ref (&addr, as, true);
8728 op0 = memory_address_addr_space (mode, op0, as);
8729 temp = gen_rtx_MEM (mode, op0);
8730 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8731 set_mem_addr_space (temp, as);
8732 }
8733 return temp;
8734
8735 case ARRAY_REF:
8736
8737 {
8738 tree array = treeop0;
8739 tree index = treeop1;
8740
8741 /* Fold an expression like: "foo"[2].
8742 This is not done in fold so it won't happen inside &.
8743 Don't fold if this is for wide characters since it's too
8744 difficult to do correctly and this is a very rare case. */
8745
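	/* For illustration: for the expression "foo"[2],
	   fold_read_from_constant_string returns the character constant 'o',
	   so we expand an integer constant instead of emitting a load from
	   the string's storage.  */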
8746 if (modifier != EXPAND_CONST_ADDRESS
8747 && modifier != EXPAND_INITIALIZER
8748 && modifier != EXPAND_MEMORY)
8749 {
8750 tree t = fold_read_from_constant_string (exp);
8751
8752 if (t)
8753 return expand_expr (t, target, tmode, modifier);
8754 }
8755
8756 /* If this is a constant index into a constant array,
8757 just get the value from the array. Handle both the cases when
8758 we have an explicit constructor and when our operand is a variable
8759 that was declared const. */
8760
8761 if (modifier != EXPAND_CONST_ADDRESS
8762 && modifier != EXPAND_INITIALIZER
8763 && modifier != EXPAND_MEMORY
8764 && TREE_CODE (array) == CONSTRUCTOR
8765 && ! TREE_SIDE_EFFECTS (array)
8766 && TREE_CODE (index) == INTEGER_CST)
8767 {
8768 unsigned HOST_WIDE_INT ix;
8769 tree field, value;
8770
8771 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8772 field, value)
8773 if (tree_int_cst_equal (field, index))
8774 {
8775 if (!TREE_SIDE_EFFECTS (value))
8776 return expand_expr (fold (value), target, tmode, modifier);
8777 break;
8778 }
8779 }
8780
8781 else if (optimize >= 1
8782 && modifier != EXPAND_CONST_ADDRESS
8783 && modifier != EXPAND_INITIALIZER
8784 && modifier != EXPAND_MEMORY
8785 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8786 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8787 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8788 && targetm.binds_local_p (array))
8789 {
8790 if (TREE_CODE (index) == INTEGER_CST)
8791 {
8792 tree init = DECL_INITIAL (array);
8793
8794 if (TREE_CODE (init) == CONSTRUCTOR)
8795 {
8796 unsigned HOST_WIDE_INT ix;
8797 tree field, value;
8798
8799 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8800 field, value)
8801 if (tree_int_cst_equal (field, index))
8802 {
8803 if (TREE_SIDE_EFFECTS (value))
8804 break;
8805
8806 if (TREE_CODE (value) == CONSTRUCTOR)
8807 {
8808 /* If VALUE is a CONSTRUCTOR, this
8809 optimization is only useful if
8810 this doesn't store the CONSTRUCTOR
8811 into memory. If it does, it is more
8812 efficient to just load the data from
8813 the array directly. */
8814 rtx ret = expand_constructor (value, target,
8815 modifier, true);
8816 if (ret == NULL_RTX)
8817 break;
8818 }
8819
8820 return expand_expr (fold (value), target, tmode,
8821 modifier);
8822 }
8823 }
 8824 	      else if (TREE_CODE (init) == STRING_CST)
8825 {
8826 tree index1 = index;
8827 tree low_bound = array_ref_low_bound (exp);
8828 index1 = fold_convert_loc (loc, sizetype,
8829 treeop1);
8830
8831 /* Optimize the special-case of a zero lower bound.
8832
8833 We convert the low_bound to sizetype to avoid some problems
8834 with constant folding. (E.g. suppose the lower bound is 1,
 8836 		     with constant folding.  (E.g. suppose the lower bound is 1,
 8835 		     and its mode is QI.  Without the conversion, (ARRAY
 8836 		     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
 8837 		     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
8838
8839 if (! integer_zerop (low_bound))
8840 index1 = size_diffop_loc (loc, index1,
8841 fold_convert_loc (loc, sizetype,
8842 low_bound));
8843
8844 if (0 > compare_tree_int (index1,
8845 TREE_STRING_LENGTH (init)))
8846 {
8847 tree type = TREE_TYPE (TREE_TYPE (init));
8848 enum machine_mode mode = TYPE_MODE (type);
8849
8850 if (GET_MODE_CLASS (mode) == MODE_INT
8851 && GET_MODE_SIZE (mode) == 1)
8852 return gen_int_mode (TREE_STRING_POINTER (init)
8853 [TREE_INT_CST_LOW (index1)],
8854 mode);
8855 }
8856 }
8857 }
8858 }
8859 }
8860 goto normal_inner_ref;
8861
8862 case COMPONENT_REF:
8863 /* If the operand is a CONSTRUCTOR, we can just extract the
8864 appropriate field if it is present. */
8865 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8866 {
8867 unsigned HOST_WIDE_INT idx;
8868 tree field, value;
8869
8870 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8871 idx, field, value)
8872 if (field == treeop1
8873 /* We can normally use the value of the field in the
8874 CONSTRUCTOR. However, if this is a bitfield in
8875 an integral mode that we can fit in a HOST_WIDE_INT,
8876 we must mask only the number of bits in the bitfield,
8877 since this is done implicitly by the constructor. If
8878 the bitfield does not meet either of those conditions,
8879 we can't do this optimization. */
8880 && (! DECL_BIT_FIELD (field)
8881 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8882 && (GET_MODE_BITSIZE (DECL_MODE (field))
8883 <= HOST_BITS_PER_WIDE_INT))))
8884 {
8885 if (DECL_BIT_FIELD (field)
8886 && modifier == EXPAND_STACK_PARM)
8887 target = 0;
8888 op0 = expand_expr (value, target, tmode, modifier);
8889 if (DECL_BIT_FIELD (field))
8890 {
8891 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8892 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8893
8894 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8895 {
8896 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8897 op0 = expand_and (imode, op0, op1, target);
8898 }
8899 else
8900 {
8901 tree count
8902 = build_int_cst (NULL_TREE,
8903 GET_MODE_BITSIZE (imode) - bitsize);
8904
8905 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8906 target, 0);
8907 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8908 target, 0);
8909 }
8910 }
8911
8912 return op0;
8913 }
8914 }
8915 goto normal_inner_ref;
8916
8917 case BIT_FIELD_REF:
8918 case ARRAY_RANGE_REF:
8919 normal_inner_ref:
8920 {
8921 enum machine_mode mode1, mode2;
8922 HOST_WIDE_INT bitsize, bitpos;
8923 tree offset;
8924 int volatilep = 0, must_force_mem;
8925 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8926 &mode1, &unsignedp, &volatilep, true);
8927 rtx orig_op0, memloc;
8928
8929 /* If we got back the original object, something is wrong. Perhaps
8930 we are evaluating an expression too early. In any event, don't
8931 infinitely recurse. */
8932 gcc_assert (tem != exp);
8933
8934 /* If TEM's type is a union of variable size, pass TARGET to the inner
8935 computation, since it will need a temporary and TARGET is known
 8936 	   to be safe to use for that.  This occurs in unchecked conversion in Ada.  */
8937 orig_op0 = op0
8938 = expand_expr (tem,
8939 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8940 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8941 != INTEGER_CST)
8942 && modifier != EXPAND_STACK_PARM
8943 ? target : NULL_RTX),
8944 VOIDmode,
8945 (modifier == EXPAND_INITIALIZER
8946 || modifier == EXPAND_CONST_ADDRESS
8947 || modifier == EXPAND_STACK_PARM)
8948 ? modifier : EXPAND_NORMAL);
8949
8950 mode2
8951 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8952
8953 /* If we have either an offset, a BLKmode result, or a reference
8954 outside the underlying object, we must force it to memory.
8955 Such a case can occur in Ada if we have unchecked conversion
8956 of an expression from a scalar type to an aggregate type or
8957 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8958 passed a partially uninitialized object or a view-conversion
8959 to a larger size. */
8960 must_force_mem = (offset
8961 || mode1 == BLKmode
8962 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8963
8964 /* Handle CONCAT first. */
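	/* Descriptive note: a CONCAT here is typically a complex value kept
	   as two pieces; a reference to the low half (bitpos 0) selects the
	   real part and a reference starting at the size of the first piece
	   selects the imaginary part.  */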
8965 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8966 {
8967 if (bitpos == 0
8968 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8969 return op0;
8970 if (bitpos == 0
8971 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8972 && bitsize)
8973 {
8974 op0 = XEXP (op0, 0);
8975 mode2 = GET_MODE (op0);
8976 }
8977 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8978 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8979 && bitpos
8980 && bitsize)
8981 {
8982 op0 = XEXP (op0, 1);
8983 bitpos = 0;
8984 mode2 = GET_MODE (op0);
8985 }
8986 else
8987 /* Otherwise force into memory. */
8988 must_force_mem = 1;
8989 }
8990
8991 /* If this is a constant, put it in a register if it is a legitimate
8992 constant and we don't need a memory reference. */
8993 if (CONSTANT_P (op0)
8994 && mode2 != BLKmode
8995 && LEGITIMATE_CONSTANT_P (op0)
8996 && !must_force_mem)
8997 op0 = force_reg (mode2, op0);
8998
8999 /* Otherwise, if this is a constant, try to force it to the constant
9000 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9001 is a legitimate constant. */
9002 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9003 op0 = validize_mem (memloc);
9004
9005 /* Otherwise, if this is a constant or the object is not in memory
 9006 	   and needs to be, put it there.  */
9007 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9008 {
9009 tree nt = build_qualified_type (TREE_TYPE (tem),
9010 (TYPE_QUALS (TREE_TYPE (tem))
9011 | TYPE_QUAL_CONST));
9012 memloc = assign_temp (nt, 1, 1, 1);
9013 emit_move_insn (memloc, op0);
9014 op0 = memloc;
9015 }
9016
9017 if (offset)
9018 {
9019 enum machine_mode address_mode;
9020 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9021 EXPAND_SUM);
9022
9023 gcc_assert (MEM_P (op0));
9024
9025 address_mode
9026 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9027 if (GET_MODE (offset_rtx) != address_mode)
9028 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9029
9030 if (GET_MODE (op0) == BLKmode
 9031 		/* A constant address in OP0 can have VOIDmode; we must
9032 not try to call force_reg in that case. */
9033 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9034 && bitsize != 0
9035 && (bitpos % bitsize) == 0
9036 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9037 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9038 {
9039 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9040 bitpos = 0;
9041 }
9042
9043 op0 = offset_address (op0, offset_rtx,
9044 highest_pow2_factor (offset));
9045 }
9046
9047 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9048 record its alignment as BIGGEST_ALIGNMENT. */
9049 if (MEM_P (op0) && bitpos == 0 && offset != 0
9050 && is_aligning_offset (offset, tem))
9051 set_mem_align (op0, BIGGEST_ALIGNMENT);
9052
9053 /* Don't forget about volatility even if this is a bitfield. */
9054 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9055 {
9056 if (op0 == orig_op0)
9057 op0 = copy_rtx (op0);
9058
9059 MEM_VOLATILE_P (op0) = 1;
9060 }
9061
9062 /* In cases where an aligned union has an unaligned object
9063 as a field, we might be extracting a BLKmode value from
9064 an integer-mode (e.g., SImode) object. Handle this case
9065 by doing the extract into an object as wide as the field
9066 (which we know to be the width of a basic mode), then
9067 storing into memory, and changing the mode to BLKmode. */
9068 if (mode1 == VOIDmode
9069 || REG_P (op0) || GET_CODE (op0) == SUBREG
9070 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9071 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9072 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9073 && modifier != EXPAND_CONST_ADDRESS
9074 && modifier != EXPAND_INITIALIZER)
9075 /* If the field isn't aligned enough to fetch as a memref,
9076 fetch it as a bit field. */
9077 || (mode1 != BLKmode
9078 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9079 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9080 || (MEM_P (op0)
9081 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9082 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9083 && ((modifier == EXPAND_CONST_ADDRESS
9084 || modifier == EXPAND_INITIALIZER)
9085 ? STRICT_ALIGNMENT
9086 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9087 || (bitpos % BITS_PER_UNIT != 0)))
9088 /* If the type and the field are a constant size and the
9089 size of the type isn't the same size as the bitfield,
9090 we must use bitfield operations. */
9091 || (bitsize >= 0
9092 && TYPE_SIZE (TREE_TYPE (exp))
9093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9094 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9095 bitsize)))
9096 {
9097 enum machine_mode ext_mode = mode;
9098
9099 if (ext_mode == BLKmode
9100 && ! (target != 0 && MEM_P (op0)
9101 && MEM_P (target)
9102 && bitpos % BITS_PER_UNIT == 0))
9103 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9104
9105 if (ext_mode == BLKmode)
9106 {
9107 if (target == 0)
9108 target = assign_temp (type, 0, 1, 1);
9109
9110 if (bitsize == 0)
9111 return target;
9112
9113 /* In this case, BITPOS must start at a byte boundary and
9114 TARGET, if specified, must be a MEM. */
9115 gcc_assert (MEM_P (op0)
9116 && (!target || MEM_P (target))
9117 && !(bitpos % BITS_PER_UNIT));
9118
9119 emit_block_move (target,
9120 adjust_address (op0, VOIDmode,
9121 bitpos / BITS_PER_UNIT),
9122 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9123 / BITS_PER_UNIT),
9124 (modifier == EXPAND_STACK_PARM
9125 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9126
9127 return target;
9128 }
9129
9130 op0 = validize_mem (op0);
9131
9132 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9133 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9134
9135 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9136 (modifier == EXPAND_STACK_PARM
9137 ? NULL_RTX : target),
9138 ext_mode, ext_mode);
9139
9140 /* If the result is a record type and BITSIZE is narrower than
9141 the mode of OP0, an integral mode, and this is a big endian
9142 machine, we must put the field into the high-order bits. */
9143 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9144 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9145 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9146 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9147 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9148 - bitsize),
9149 op0, 1);
9150
9151 /* If the result type is BLKmode, store the data into a temporary
9152 of the appropriate type, but with the mode corresponding to the
9153 mode for the data we have (op0's mode). It's tempting to make
9154 this a constant type, since we know it's only being stored once,
9155 but that can cause problems if we are taking the address of this
9156 COMPONENT_REF because the MEM of any reference via that address
9157 will have flags corresponding to the type, which will not
9158 necessarily be constant. */
9159 if (mode == BLKmode)
9160 {
9161 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9162 rtx new_rtx;
9163
9164 /* If the reference doesn't use the alias set of its type,
9165 we cannot create the temporary using that type. */
9166 if (component_uses_parent_alias_set (exp))
9167 {
9168 new_rtx = assign_stack_local (ext_mode, size, 0);
9169 set_mem_alias_set (new_rtx, get_alias_set (exp));
9170 }
9171 else
9172 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9173
9174 emit_move_insn (new_rtx, op0);
9175 op0 = copy_rtx (new_rtx);
9176 PUT_MODE (op0, BLKmode);
9177 set_mem_attributes (op0, exp, 1);
9178 }
9179
9180 return op0;
9181 }
9182
9183 /* If the result is BLKmode, use that to access the object
9184 now as well. */
9185 if (mode == BLKmode)
9186 mode1 = BLKmode;
9187
9188 /* Get a reference to just this component. */
9189 if (modifier == EXPAND_CONST_ADDRESS
9190 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9191 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9192 else
9193 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9194
9195 if (op0 == orig_op0)
9196 op0 = copy_rtx (op0);
9197
9198 set_mem_attributes (op0, exp, 0);
9199 if (REG_P (XEXP (op0, 0)))
9200 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9201
9202 MEM_VOLATILE_P (op0) |= volatilep;
9203 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9204 || modifier == EXPAND_CONST_ADDRESS
9205 || modifier == EXPAND_INITIALIZER)
9206 return op0;
9207 else if (target == 0)
9208 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9209
9210 convert_move (target, op0, unsignedp);
9211 return target;
9212 }
9213
9214 case OBJ_TYPE_REF:
9215 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9216
9217 case CALL_EXPR:
9218 /* All valid uses of __builtin_va_arg_pack () are removed during
9219 inlining. */
9220 if (CALL_EXPR_VA_ARG_PACK (exp))
9221 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9222 {
9223 tree fndecl = get_callee_fndecl (exp), attr;
9224
9225 if (fndecl
9226 && (attr = lookup_attribute ("error",
9227 DECL_ATTRIBUTES (fndecl))) != NULL)
9228 error ("%Kcall to %qs declared with attribute error: %s",
9229 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9230 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9231 if (fndecl
9232 && (attr = lookup_attribute ("warning",
9233 DECL_ATTRIBUTES (fndecl))) != NULL)
9234 warning_at (tree_nonartificial_location (exp),
9235 0, "%Kcall to %qs declared with attribute warning: %s",
9236 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9237 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9238
9239 /* Check for a built-in function. */
9240 if (fndecl && DECL_BUILT_IN (fndecl))
9241 {
9242 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9243 return expand_builtin (exp, target, subtarget, tmode, ignore);
9244 }
9245 }
9246 return expand_call (exp, target, ignore);
9247
9248 case VIEW_CONVERT_EXPR:
9249 op0 = NULL_RTX;
9250
9251 /* If we are converting to BLKmode, try to avoid an intermediate
9252 temporary by fetching an inner memory reference. */
9253 if (mode == BLKmode
9254 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9255 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9256 && handled_component_p (treeop0))
9257 {
9258 enum machine_mode mode1;
9259 HOST_WIDE_INT bitsize, bitpos;
9260 tree offset;
9261 int unsignedp;
9262 int volatilep = 0;
9263 tree tem
9264 = get_inner_reference (treeop0, &bitsize, &bitpos,
9265 &offset, &mode1, &unsignedp, &volatilep,
9266 true);
9267 rtx orig_op0;
9268
9269 /* ??? We should work harder and deal with non-zero offsets. */
9270 if (!offset
9271 && (bitpos % BITS_PER_UNIT) == 0
9272 && bitsize >= 0
9273 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9274 {
9275 /* See the normal_inner_ref case for the rationale. */
9276 orig_op0
9277 = expand_expr (tem,
9278 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9279 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9280 != INTEGER_CST)
9281 && modifier != EXPAND_STACK_PARM
9282 ? target : NULL_RTX),
9283 VOIDmode,
9284 (modifier == EXPAND_INITIALIZER
9285 || modifier == EXPAND_CONST_ADDRESS
9286 || modifier == EXPAND_STACK_PARM)
9287 ? modifier : EXPAND_NORMAL);
9288
9289 if (MEM_P (orig_op0))
9290 {
9291 op0 = orig_op0;
9292
9293 /* Get a reference to just this component. */
9294 if (modifier == EXPAND_CONST_ADDRESS
9295 || modifier == EXPAND_SUM
9296 || modifier == EXPAND_INITIALIZER)
9297 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9298 else
9299 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9300
9301 if (op0 == orig_op0)
9302 op0 = copy_rtx (op0);
9303
9304 set_mem_attributes (op0, treeop0, 0);
9305 if (REG_P (XEXP (op0, 0)))
9306 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9307
9308 MEM_VOLATILE_P (op0) |= volatilep;
9309 }
9310 }
9311 }
9312
9313 if (!op0)
9314 op0 = expand_expr (treeop0,
9315 NULL_RTX, VOIDmode, modifier);
9316
9317 /* If the input and output modes are both the same, we are done. */
9318 if (mode == GET_MODE (op0))
9319 ;
9320 /* If neither mode is BLKmode, and both modes are the same size
9321 then we can use gen_lowpart. */
9322 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9323 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9324 && !COMPLEX_MODE_P (GET_MODE (op0)))
9325 {
9326 if (GET_CODE (op0) == SUBREG)
9327 op0 = force_reg (GET_MODE (op0), op0);
9328 op0 = gen_lowpart (mode, op0);
9329 }
9330 /* If both modes are integral, then we can convert from one to the
9331 other. */
9332 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9333 op0 = convert_modes (mode, GET_MODE (op0), op0,
9334 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9335 /* As a last resort, spill op0 to memory, and reload it in a
9336 different mode. */
9337 else if (!MEM_P (op0))
9338 {
9339 /* If the operand is not a MEM, force it into memory. Since we
9340 are going to be changing the mode of the MEM, don't call
9341 force_const_mem for constants because we don't allow pool
9342 constants to change mode. */
9343 tree inner_type = TREE_TYPE (treeop0);
9344
9345 gcc_assert (!TREE_ADDRESSABLE (exp));
9346
9347 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9348 target
9349 = assign_stack_temp_for_type
9350 (TYPE_MODE (inner_type),
9351 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9352
9353 emit_move_insn (target, op0);
9354 op0 = target;
9355 }
9356
9357 /* At this point, OP0 is in the correct mode. If the output type is
9358 such that the operand is known to be aligned, indicate that it is.
9359 Otherwise, we need only be concerned about alignment for non-BLKmode
9360 results. */
9361 if (MEM_P (op0))
9362 {
9363 op0 = copy_rtx (op0);
9364
9365 if (TYPE_ALIGN_OK (type))
9366 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9367 else if (STRICT_ALIGNMENT
9368 && mode != BLKmode
9369 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9370 {
9371 tree inner_type = TREE_TYPE (treeop0);
9372 HOST_WIDE_INT temp_size
9373 = MAX (int_size_in_bytes (inner_type),
9374 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9375 rtx new_rtx
9376 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9377 rtx new_with_op0_mode
9378 = adjust_address (new_rtx, GET_MODE (op0), 0);
9379
9380 gcc_assert (!TREE_ADDRESSABLE (exp));
9381
9382 if (GET_MODE (op0) == BLKmode)
9383 emit_block_move (new_with_op0_mode, op0,
9384 GEN_INT (GET_MODE_SIZE (mode)),
9385 (modifier == EXPAND_STACK_PARM
9386 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9387 else
9388 emit_move_insn (new_with_op0_mode, op0);
9389
9390 op0 = new_rtx;
9391 }
9392
9393 op0 = adjust_address (op0, mode, 0);
9394 }
9395
9396 return op0;
9397
9398 /* Use a compare and a jump for BLKmode comparisons, or for function
 9399        type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
9400
9401 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
 9402        are occasionally created by folding during expansion.  */
9403 case TRUTH_ANDIF_EXPR:
9404 case TRUTH_ORIF_EXPR:
9405 if (! ignore
9406 && (target == 0
9407 || modifier == EXPAND_STACK_PARM
9408 || ! safe_from_p (target, treeop0, 1)
9409 || ! safe_from_p (target, treeop1, 1)
9410 /* Make sure we don't have a hard reg (such as function's return
9411 value) live across basic blocks, if not optimizing. */
9412 || (!optimize && REG_P (target)
9413 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9414 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9415
9416 if (target)
9417 emit_move_insn (target, const0_rtx);
9418
9419 op1 = gen_label_rtx ();
9420 jumpifnot_1 (code, treeop0, treeop1, op1);
9421
9422 if (target)
9423 emit_move_insn (target, const1_rtx);
9424
9425 emit_label (op1);
9426 return ignore ? const0_rtx : target;
9427
9428 case STATEMENT_LIST:
9429 {
9430 tree_stmt_iterator iter;
9431
9432 gcc_assert (ignore);
9433
9434 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9435 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9436 }
9437 return const0_rtx;
9438
9439 case COND_EXPR:
9440 /* A COND_EXPR with its type being VOID_TYPE represents a
9441 conditional jump and is handled in
9442 expand_gimple_cond_expr. */
9443 gcc_assert (!VOID_TYPE_P (type));
9444
9445 /* Note that COND_EXPRs whose type is a structure or union
9446 are required to be constructed to contain assignments of
9447 a temporary variable, so that we can evaluate them here
9448 for side effect only. If type is void, we must do likewise. */
9449
9450 gcc_assert (!TREE_ADDRESSABLE (type)
9451 && !ignore
9452 && TREE_TYPE (treeop1) != void_type_node
9453 && TREE_TYPE (treeop2) != void_type_node);
9454
9455 /* If we are not to produce a result, we have no target. Otherwise,
9456 if a target was specified use it; it will not be used as an
9457 intermediate target unless it is safe. If no target, use a
9458 temporary. */
9459
9460 if (modifier != EXPAND_STACK_PARM
9461 && original_target
9462 && safe_from_p (original_target, treeop0, 1)
9463 && GET_MODE (original_target) == mode
9464 #ifdef HAVE_conditional_move
9465 && (! can_conditionally_move_p (mode)
9466 || REG_P (original_target))
9467 #endif
9468 && !MEM_P (original_target))
9469 temp = original_target;
9470 else
9471 temp = assign_temp (type, 0, 0, 1);
9472
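      /* Descriptive sketch of the expansion emitted below:
	     if (!treeop0) goto op0;
	     temp = treeop1;  goto op1;
	   op0:
	     temp = treeop2;
	   op1:
	 with TEMP holding the value of the COND_EXPR.  */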
9473 do_pending_stack_adjust ();
9474 NO_DEFER_POP;
9475 op0 = gen_label_rtx ();
9476 op1 = gen_label_rtx ();
9477 jumpifnot (treeop0, op0);
9478 store_expr (treeop1, temp,
9479 modifier == EXPAND_STACK_PARM,
9480 false);
9481
9482 emit_jump_insn (gen_jump (op1));
9483 emit_barrier ();
9484 emit_label (op0);
9485 store_expr (treeop2, temp,
9486 modifier == EXPAND_STACK_PARM,
9487 false);
9488
9489 emit_label (op1);
9490 OK_DEFER_POP;
9491 return temp;
9492
9493 case VEC_COND_EXPR:
9494 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9495 return target;
9496
9497 case MODIFY_EXPR:
9498 {
9499 tree lhs = treeop0;
9500 tree rhs = treeop1;
9501 gcc_assert (ignore);
9502
9503 /* Check for |= or &= of a bitfield of size one into another bitfield
9504 of size 1. In this case, (unless we need the result of the
9505 assignment) we can do this more efficiently with a
9506 test followed by an assignment, if necessary.
9507
9508 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9509 things change so we do, this code should be enhanced to
9510 support it. */
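	/* For illustration, with one-bit bitfields b1 and b2:
	   "b1 |= b2" becomes "if (b2) b1 = 1;" and
	   "b1 &= b2" becomes "if (!b2) b1 = 0;",
	   skipping the store when it would not change the destination.  */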
9511 if (TREE_CODE (lhs) == COMPONENT_REF
9512 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9513 || TREE_CODE (rhs) == BIT_AND_EXPR)
9514 && TREE_OPERAND (rhs, 0) == lhs
9515 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9516 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9517 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9518 {
9519 rtx label = gen_label_rtx ();
9520 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9521 do_jump (TREE_OPERAND (rhs, 1),
9522 value ? label : 0,
9523 value ? 0 : label);
9524 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9525 MOVE_NONTEMPORAL (exp));
9526 do_pending_stack_adjust ();
9527 emit_label (label);
9528 return const0_rtx;
9529 }
9530
9531 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9532 return const0_rtx;
9533 }
9534
9535 case ADDR_EXPR:
9536 return expand_expr_addr_expr (exp, target, tmode, modifier);
9537
9538 case REALPART_EXPR:
9539 op0 = expand_normal (treeop0);
9540 return read_complex_part (op0, false);
9541
9542 case IMAGPART_EXPR:
9543 op0 = expand_normal (treeop0);
9544 return read_complex_part (op0, true);
9545
9546 case RETURN_EXPR:
9547 case LABEL_EXPR:
9548 case GOTO_EXPR:
9549 case SWITCH_EXPR:
9550 case ASM_EXPR:
9551 /* Expanded in cfgexpand.c. */
9552 gcc_unreachable ();
9553
9554 case TRY_CATCH_EXPR:
9555 case CATCH_EXPR:
9556 case EH_FILTER_EXPR:
9557 case TRY_FINALLY_EXPR:
9558 /* Lowered by tree-eh.c. */
9559 gcc_unreachable ();
9560
9561 case WITH_CLEANUP_EXPR:
9562 case CLEANUP_POINT_EXPR:
9563 case TARGET_EXPR:
9564 case CASE_LABEL_EXPR:
9565 case VA_ARG_EXPR:
9566 case BIND_EXPR:
9567 case INIT_EXPR:
9568 case CONJ_EXPR:
9569 case COMPOUND_EXPR:
9570 case PREINCREMENT_EXPR:
9571 case PREDECREMENT_EXPR:
9572 case POSTINCREMENT_EXPR:
9573 case POSTDECREMENT_EXPR:
9574 case LOOP_EXPR:
9575 case EXIT_EXPR:
9576 /* Lowered by gimplify.c. */
9577 gcc_unreachable ();
9578
9579 case FDESC_EXPR:
 9580       /* Function descriptors are not valid except as
9581 initialization constants, and should not be expanded. */
9582 gcc_unreachable ();
9583
9584 case WITH_SIZE_EXPR:
9585 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9586 have pulled out the size to use in whatever context it needed. */
9587 return expand_expr_real (treeop0, original_target, tmode,
9588 modifier, alt_rtl);
9589
9590 case REALIGN_LOAD_EXPR:
9591 {
9592 tree oprnd0 = treeop0;
9593 tree oprnd1 = treeop1;
9594 tree oprnd2 = treeop2;
9595 rtx op2;
9596
9597 this_optab = optab_for_tree_code (code, type, optab_default);
9598 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9599 op2 = expand_normal (oprnd2);
9600 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9601 target, unsignedp);
9602 gcc_assert (temp);
9603 return temp;
9604 }
9605
9606 case DOT_PROD_EXPR:
9607 {
9608 tree oprnd0 = treeop0;
9609 tree oprnd1 = treeop1;
9610 tree oprnd2 = treeop2;
9611 rtx op2;
9612
9613 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9614 op2 = expand_normal (oprnd2);
9615 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9616 target, unsignedp);
9617 return target;
9618 }
9619
9620 case COMPOUND_LITERAL_EXPR:
9621 {
9622 /* Initialize the anonymous variable declared in the compound
9623 literal, then return the variable. */
9624 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9625
9626 /* Create RTL for this variable. */
9627 if (!DECL_RTL_SET_P (decl))
9628 {
9629 if (DECL_HARD_REGISTER (decl))
9630 /* The user specified an assembler name for this variable.
9631 Set that up now. */
9632 rest_of_decl_compilation (decl, 0, 0);
9633 else
9634 expand_decl (decl);
9635 }
9636
9637 return expand_expr_real (decl, original_target, tmode,
9638 modifier, alt_rtl);
9639 }
9640
9641 default:
9642 return expand_expr_real_2 (&ops, target, tmode, modifier);
9643 }
9644 }
9645 \f
9646 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9647 signedness of TYPE), possibly returning the result in TARGET. */
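/* Illustrative note: for an unsigned type of precision 3 this ANDs with
   the mask (1 << 3) - 1 == 7; for a signed type the value is shifted
   left and then arithmetically right by GET_MODE_BITSIZE (mode) - 3,
   sign-extending the result from bit 2.  */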
9648 static rtx
9649 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9650 {
9651 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9652 if (target && GET_MODE (target) != GET_MODE (exp))
9653 target = 0;
9654 /* For constant values, reduce using build_int_cst_type. */
9655 if (CONST_INT_P (exp))
9656 {
9657 HOST_WIDE_INT value = INTVAL (exp);
9658 tree t = build_int_cst_type (type, value);
9659 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9660 }
9661 else if (TYPE_UNSIGNED (type))
9662 {
9663 rtx mask;
9664 if (prec < HOST_BITS_PER_WIDE_INT)
9665 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9666 GET_MODE (exp));
9667 else
9668 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9669 ((unsigned HOST_WIDE_INT) 1
9670 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9671 GET_MODE (exp));
9672 return expand_and (GET_MODE (exp), exp, mask, target);
9673 }
9674 else
9675 {
9676 tree count = build_int_cst (NULL_TREE,
9677 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9678 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9679 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9680 }
9681 }
9682 \f
9683 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9684 when applied to the address of EXP produces an address known to be
9685 aligned more than BIGGEST_ALIGNMENT. */
9686
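/* Illustrative example: an offset written as
   (-(sizetype) &exp) & (ALIGN - 1), where ALIGN is a power of two and
   ALIGN - 1 exceeds BIGGEST_ALIGNMENT in bytes, matches the pattern
   recognized below.  */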
9687 static int
9688 is_aligning_offset (const_tree offset, const_tree exp)
9689 {
9690 /* Strip off any conversions. */
9691 while (CONVERT_EXPR_P (offset))
9692 offset = TREE_OPERAND (offset, 0);
9693
9694 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9695 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9696 if (TREE_CODE (offset) != BIT_AND_EXPR
9697 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9698 || compare_tree_int (TREE_OPERAND (offset, 1),
9699 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
 9700       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9701 return 0;
9702
9703 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9704 It must be NEGATE_EXPR. Then strip any more conversions. */
9705 offset = TREE_OPERAND (offset, 0);
9706 while (CONVERT_EXPR_P (offset))
9707 offset = TREE_OPERAND (offset, 0);
9708
9709 if (TREE_CODE (offset) != NEGATE_EXPR)
9710 return 0;
9711
9712 offset = TREE_OPERAND (offset, 0);
9713 while (CONVERT_EXPR_P (offset))
9714 offset = TREE_OPERAND (offset, 0);
9715
9716 /* This must now be the address of EXP. */
9717 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9718 }
9719 \f
9720 /* Return the tree node if an ARG corresponds to a string constant or zero
9721 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9722 in bytes within the string that ARG is accessing. The type of the
9723 offset will be `sizetype'. */
9724
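/* For illustration: given ARG of the form &"hello"[2], this returns
   the STRING_CST "hello" and sets *PTR_OFFSET to 2; for an address
   computed as &"hello" + i, it returns the STRING_CST with
   *PTR_OFFSET set to the (sizetype-converted) offset i.  */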
9725 tree
9726 string_constant (tree arg, tree *ptr_offset)
9727 {
9728 tree array, offset, lower_bound;
9729 STRIP_NOPS (arg);
9730
9731 if (TREE_CODE (arg) == ADDR_EXPR)
9732 {
9733 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9734 {
9735 *ptr_offset = size_zero_node;
9736 return TREE_OPERAND (arg, 0);
9737 }
9738 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9739 {
9740 array = TREE_OPERAND (arg, 0);
9741 offset = size_zero_node;
9742 }
9743 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9744 {
9745 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9746 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9747 if (TREE_CODE (array) != STRING_CST
9748 && TREE_CODE (array) != VAR_DECL)
9749 return 0;
9750
9751 /* Check if the array has a nonzero lower bound. */
9752 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9753 if (!integer_zerop (lower_bound))
9754 {
9755 /* If the offset and base aren't both constants, return 0. */
9756 if (TREE_CODE (lower_bound) != INTEGER_CST)
9757 return 0;
9758 if (TREE_CODE (offset) != INTEGER_CST)
9759 return 0;
9760 /* Adjust offset by the lower bound. */
9761 offset = size_diffop (fold_convert (sizetype, offset),
9762 fold_convert (sizetype, lower_bound));
9763 }
9764 }
9765 else
9766 return 0;
9767 }
9768 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9769 {
9770 tree arg0 = TREE_OPERAND (arg, 0);
9771 tree arg1 = TREE_OPERAND (arg, 1);
9772
9773 STRIP_NOPS (arg0);
9774 STRIP_NOPS (arg1);
9775
9776 if (TREE_CODE (arg0) == ADDR_EXPR
9777 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9778 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9779 {
9780 array = TREE_OPERAND (arg0, 0);
9781 offset = arg1;
9782 }
9783 else if (TREE_CODE (arg1) == ADDR_EXPR
9784 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9785 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9786 {
9787 array = TREE_OPERAND (arg1, 0);
9788 offset = arg0;
9789 }
9790 else
9791 return 0;
9792 }
9793 else
9794 return 0;
9795
9796 if (TREE_CODE (array) == STRING_CST)
9797 {
9798 *ptr_offset = fold_convert (sizetype, offset);
9799 return array;
9800 }
9801 else if (TREE_CODE (array) == VAR_DECL)
9802 {
9803 int length;
9804
9805 /* Variables initialized to string literals can be handled too. */
9806 if (DECL_INITIAL (array) == NULL_TREE
9807 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9808 return 0;
9809
 9810       /* Only handle variables that are read-only, non-volatile and bind locally.  */
9811 if (! TREE_READONLY (array)
9812 || TREE_SIDE_EFFECTS (array)
9813 || ! targetm.binds_local_p (array))
9814 return 0;
9815
9816 /* Avoid const char foo[4] = "abcde"; */
9817 if (DECL_SIZE_UNIT (array) == NULL_TREE
9818 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9819 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9820 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9821 return 0;
9822
 9823       /* If the variable is bigger than the string literal, OFFSET must be
 9824 	 constant and inside the bounds of the string literal.  */
9825 offset = fold_convert (sizetype, offset);
9826 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9827 && (! host_integerp (offset, 1)
9828 || compare_tree_int (offset, length) >= 0))
9829 return 0;
9830
9831 *ptr_offset = offset;
9832 return DECL_INITIAL (array);
9833 }
9834
9835 return 0;
9836 }
9837 \f
 9838 /* Generate code to calculate OPS, an exploded expression
9839 using a store-flag instruction and return an rtx for the result.
9840 OPS reflects a comparison.
9841
9842 If TARGET is nonzero, store the result there if convenient.
9843
9844 Return zero if there is no suitable set-flag instruction
9845 available on this machine.
9846
9847 Once expand_expr has been called on the arguments of the comparison,
9848 we are committed to doing the store flag, since it is not safe to
9849 re-evaluate the expression. We emit the store-flag insn by calling
9850 emit_store_flag, but only expand the arguments if we have a reason
9851 to believe that emit_store_flag will be successful. If we think that
9852 it will, but it isn't, we have to simulate the store-flag with a
9853 set/jump/set sequence. */
9854
9855 static rtx
9856 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9857 {
9858 enum rtx_code code;
9859 tree arg0, arg1, type;
9860 tree tem;
9861 enum machine_mode operand_mode;
9862 int unsignedp;
9863 rtx op0, op1;
9864 rtx subtarget = target;
9865 location_t loc = ops->location;
9866
9867 arg0 = ops->op0;
9868 arg1 = ops->op1;
9869
9870 /* Don't crash if the comparison was erroneous. */
9871 if (arg0 == error_mark_node || arg1 == error_mark_node)
9872 return const0_rtx;
9873
9874 type = TREE_TYPE (arg0);
9875 operand_mode = TYPE_MODE (type);
9876 unsignedp = TYPE_UNSIGNED (type);
9877
9878 /* We won't bother with BLKmode store-flag operations because it would mean
9879 passing a lot of information to emit_store_flag. */
9880 if (operand_mode == BLKmode)
9881 return 0;
9882
9883 /* We won't bother with store-flag operations involving function pointers
9884 when function pointers must be canonicalized before comparisons. */
9885 #ifdef HAVE_canonicalize_funcptr_for_compare
9886 if (HAVE_canonicalize_funcptr_for_compare
9887 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9888 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9889 == FUNCTION_TYPE))
9890 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9891 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9892 == FUNCTION_TYPE))))
9893 return 0;
9894 #endif
9895
9896 STRIP_NOPS (arg0);
9897 STRIP_NOPS (arg1);
9898
9899 /* Get the rtx comparison code to use. We know that EXP is a comparison
9900 operation of some type. Some comparisons against 1 and -1 can be
9901 converted to comparisons with zero. Do so here so that the tests
9902 below will be aware that we have a comparison with zero. These
9903 tests will not catch constants in the first operand, but constants
9904 are rarely passed as the first operand. */
9905
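  /* For example, a signed "x < 1" is rewritten below as "x <= 0" and a
     signed "x > -1" as "x >= 0", so the code that follows only has to
     recognize comparisons against zero.  */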
9906 switch (ops->code)
9907 {
9908 case EQ_EXPR:
9909 code = EQ;
9910 break;
9911 case NE_EXPR:
9912 code = NE;
9913 break;
9914 case LT_EXPR:
9915 if (integer_onep (arg1))
9916 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9917 else
9918 code = unsignedp ? LTU : LT;
9919 break;
9920 case LE_EXPR:
9921 if (! unsignedp && integer_all_onesp (arg1))
9922 arg1 = integer_zero_node, code = LT;
9923 else
9924 code = unsignedp ? LEU : LE;
9925 break;
9926 case GT_EXPR:
9927 if (! unsignedp && integer_all_onesp (arg1))
9928 arg1 = integer_zero_node, code = GE;
9929 else
9930 code = unsignedp ? GTU : GT;
9931 break;
9932 case GE_EXPR:
9933 if (integer_onep (arg1))
9934 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9935 else
9936 code = unsignedp ? GEU : GE;
9937 break;
9938
9939 case UNORDERED_EXPR:
9940 code = UNORDERED;
9941 break;
9942 case ORDERED_EXPR:
9943 code = ORDERED;
9944 break;
9945 case UNLT_EXPR:
9946 code = UNLT;
9947 break;
9948 case UNLE_EXPR:
9949 code = UNLE;
9950 break;
9951 case UNGT_EXPR:
9952 code = UNGT;
9953 break;
9954 case UNGE_EXPR:
9955 code = UNGE;
9956 break;
9957 case UNEQ_EXPR:
9958 code = UNEQ;
9959 break;
9960 case LTGT_EXPR:
9961 code = LTGT;
9962 break;
9963
9964 default:
9965 gcc_unreachable ();
9966 }
9967
9968 /* Put a constant second. */
9969 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9970 || TREE_CODE (arg0) == FIXED_CST)
9971 {
9972 tem = arg0; arg0 = arg1; arg1 = tem;
9973 code = swap_condition (code);
9974 }
9975
9976 /* If this is an equality or inequality test of a single bit, we can
9977 do this by shifting the bit being tested to the low-order bit and
9978 masking the result with the constant 1. If the condition was EQ,
9979 we xor it with 1. This does not require an scc insn and is faster
9980 than an scc insn even if we have it.
9981
9982 The code to make this transformation was moved into fold_single_bit_test,
9983 so we just call into the folder and expand its result. */
9984
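  /* For illustration: "(x & 8) != 0" is folded by fold_single_bit_test
     into something equivalent to "(x >> 3) & 1", avoiding a store-flag
     (scc) instruction.  */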
9985 if ((code == NE || code == EQ)
9986 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9987 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9988 {
9989 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9990 return expand_expr (fold_single_bit_test (loc,
9991 code == NE ? NE_EXPR : EQ_EXPR,
9992 arg0, arg1, type),
9993 target, VOIDmode, EXPAND_NORMAL);
9994 }
9995
9996 if (! get_subtarget (target)
9997 || GET_MODE (subtarget) != operand_mode)
9998 subtarget = 0;
9999
10000 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10001
10002 if (target == 0)
10003 target = gen_reg_rtx (mode);
10004
10005 /* Try a cstore if possible. */
10006 return emit_store_flag_force (target, code, op0, op1,
10007 operand_mode, unsignedp, 1);
10008 }
10009 \f
10010
10011 /* Stubs in case we haven't got a casesi insn. */
10012 #ifndef HAVE_casesi
10013 # define HAVE_casesi 0
10014 # define gen_casesi(a, b, c, d, e) (0)
10015 # define CODE_FOR_casesi CODE_FOR_nothing
10016 #endif
10017
10018 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10019 0 otherwise (i.e. if there is no casesi instruction). */
10020 int
10021 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10022 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10023 rtx fallback_label ATTRIBUTE_UNUSED)
10024 {
10025 enum machine_mode index_mode = SImode;
10026 int index_bits = GET_MODE_BITSIZE (index_mode);
10027 rtx op1, op2, index;
10028 enum machine_mode op_mode;
10029
10030 if (! HAVE_casesi)
10031 return 0;
10032
10033 /* Convert the index to SImode. */
10034 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10035 {
10036 enum machine_mode omode = TYPE_MODE (index_type);
10037 rtx rangertx = expand_normal (range);
10038
10039 /* We must handle the endpoints in the original mode. */
10040 index_expr = build2 (MINUS_EXPR, index_type,
10041 index_expr, minval);
10042 minval = integer_zero_node;
10043 index = expand_normal (index_expr);
10044 if (default_label)
10045 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10046 omode, 1, default_label);
10047 /* Now we can safely truncate. */
10048 index = convert_to_mode (index_mode, index, 0);
10049 }
10050 else
10051 {
10052 if (TYPE_MODE (index_type) != index_mode)
10053 {
10054 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10055 index_expr = fold_convert (index_type, index_expr);
10056 }
10057
10058 index = expand_normal (index_expr);
10059 }
10060
10061 do_pending_stack_adjust ();
10062
10063 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10064 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10065 (index, op_mode))
10066 index = copy_to_mode_reg (op_mode, index);
10067
10068 op1 = expand_normal (minval);
10069
10070 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10071 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10072 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10073 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10074 (op1, op_mode))
10075 op1 = copy_to_mode_reg (op_mode, op1);
10076
10077 op2 = expand_normal (range);
10078
10079 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10080 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10081 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10082 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10083 (op2, op_mode))
10084 op2 = copy_to_mode_reg (op_mode, op2);
10085
10086 emit_jump_insn (gen_casesi (index, op1, op2,
10087 table_label, !default_label
10088 ? fallback_label : default_label));
10089 return 1;
10090 }
10091
10092 /* Attempt to generate a tablejump instruction; same concept. */
10093 #ifndef HAVE_tablejump
10094 #define HAVE_tablejump 0
10095 #define gen_tablejump(x, y) (0)
10096 #endif
10097
10098 /* Subroutine of the next function.
10099
10100 INDEX is the value being switched on, with the lowest value
10101 in the table already subtracted.
10102 MODE is its expected mode (needed if INDEX is constant).
10103 RANGE is the length of the jump table.
10104 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10105
10106 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10107 index value is out of range. */
10108
10109 static void
10110 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10111 rtx default_label)
10112 {
10113 rtx temp, vector;
10114
10115 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10116 cfun->cfg->max_jumptable_ents = INTVAL (range);
10117
10118 /* Do an unsigned comparison (in the proper mode) between the index
10119 expression and the value which represents the length of the range.
10120 Since we just finished subtracting the lower bound of the range
10121 from the index expression, this comparison allows us to simultaneously
10122 check that the original index expression value is both greater than
10123 or equal to the minimum value of the range and less than or equal to
10124 the maximum value of the range. */
10125
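  /* For example, with case values 5 .. 12 the index has already had 5
     subtracted, so a single unsigned comparison against RANGE == 7
     rejects both indices below 5 (which wrap to large unsigned values)
     and indices above 12.  */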
10126 if (default_label)
10127 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10128 default_label);
10129
10130 /* If index is in range, it must fit in Pmode.
10131 Convert to Pmode so we can index with it. */
10132 if (mode != Pmode)
10133 index = convert_to_mode (Pmode, index, 1);
10134
10135 /* Don't let a MEM slip through, because then INDEX that comes
10136 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10137 and break_out_memory_refs will go to work on it and mess it up. */
10138 #ifdef PIC_CASE_VECTOR_ADDRESS
10139 if (flag_pic && !REG_P (index))
10140 index = copy_to_mode_reg (Pmode, index);
10141 #endif
10142
10143 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10144 GET_MODE_SIZE, because this indicates how large insns are. The other
10145 uses should all be Pmode, because they are addresses. This code
10146 could fail if addresses and insns are not the same size. */
10147 index = gen_rtx_PLUS (Pmode,
10148 gen_rtx_MULT (Pmode, index,
10149 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10150 gen_rtx_LABEL_REF (Pmode, table_label));
10151 #ifdef PIC_CASE_VECTOR_ADDRESS
10152 if (flag_pic)
10153 index = PIC_CASE_VECTOR_ADDRESS (index);
10154 else
10155 #endif
10156 index = memory_address (CASE_VECTOR_MODE, index);
10157 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10158 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10159 convert_move (temp, vector, 0);
10160
10161 emit_jump_insn (gen_tablejump (temp, table_label));
10162
10163 /* If we are generating PIC code or if the table is PC-relative, the
10164 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10165 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10166 emit_barrier ();
10167 }
10168
10169 int
10170 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10171 rtx table_label, rtx default_label)
10172 {
10173 rtx index;
10174
10175 if (! HAVE_tablejump)
10176 return 0;
10177
10178 index_expr = fold_build2 (MINUS_EXPR, index_type,
10179 fold_convert (index_type, index_expr),
10180 fold_convert (index_type, minval));
10181 index = expand_normal (index_expr);
10182 do_pending_stack_adjust ();
10183
10184 do_tablejump (index, TYPE_MODE (index_type),
10185 convert_modes (TYPE_MODE (index_type),
10186 TYPE_MODE (TREE_TYPE (range)),
10187 expand_normal (range),
10188 TYPE_UNSIGNED (TREE_TYPE (range))),
10189 table_label, default_label);
10190 return 1;
10191 }
10192
10193 /* Nonzero if the mode is a valid vector mode for this architecture.
10194 This returns nonzero even if there is no hardware support for the
10195 vector mode, but we can emulate with narrower modes. */
10196
10197 int
10198 vector_mode_valid_p (enum machine_mode mode)
10199 {
10200 enum mode_class mclass = GET_MODE_CLASS (mode);
10201 enum machine_mode innermode;
10202
10203 /* Doh! What's going on? */
10204 if (mclass != MODE_VECTOR_INT
10205 && mclass != MODE_VECTOR_FLOAT
10206 && mclass != MODE_VECTOR_FRACT
10207 && mclass != MODE_VECTOR_UFRACT
10208 && mclass != MODE_VECTOR_ACCUM
10209 && mclass != MODE_VECTOR_UACCUM)
10210 return 0;
10211
10212 /* Hardware support. Woo hoo! */
10213 if (targetm.vector_mode_supported_p (mode))
10214 return 1;
10215
10216 innermode = GET_MODE_INNER (mode);
10217
10218 /* We should probably return 1 if requesting V4DI and we have no DI,
 10219      but do have V2DI; however, this is probably very unlikely.  */
10220
10221 /* If we have support for the inner mode, we can safely emulate it.
 10222      We may not have V2DI, but we can emulate with a pair of DIs.  */
10223 return targetm.scalar_mode_supported_p (innermode);
10224 }
10225
10226 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10227 static rtx
10228 const_vector_from_tree (tree exp)
10229 {
10230 rtvec v;
10231 int units, i;
10232 tree link, elt;
10233 enum machine_mode inner, mode;
10234
10235 mode = TYPE_MODE (TREE_TYPE (exp));
10236
10237 if (initializer_zerop (exp))
10238 return CONST0_RTX (mode);
10239
10240 units = GET_MODE_NUNITS (mode);
10241 inner = GET_MODE_INNER (mode);
10242
10243 v = rtvec_alloc (units);
10244
10245 link = TREE_VECTOR_CST_ELTS (exp);
10246 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10247 {
10248 elt = TREE_VALUE (link);
10249
10250 if (TREE_CODE (elt) == REAL_CST)
10251 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10252 inner);
10253 else if (TREE_CODE (elt) == FIXED_CST)
10254 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10255 inner);
10256 else
10257 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10258 TREE_INT_CST_HIGH (elt),
10259 inner);
10260 }
10261
10262 /* Initialize remaining elements to 0. */
10263 for (; i < units; ++i)
10264 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10265
10266 return gen_rtx_CONST_VECTOR (mode, v);
10267 }
10268
10269
 10270 /* Build a decl for an EH personality function named NAME.  */
10271
10272 tree
10273 build_personality_function (const char *name)
10274 {
10275 tree decl, type;
10276
10277 type = build_function_type_list (integer_type_node, integer_type_node,
10278 long_long_unsigned_type_node,
10279 ptr_type_node, ptr_type_node, NULL_TREE);
10280 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10281 get_identifier (name), type);
10282 DECL_ARTIFICIAL (decl) = 1;
10283 DECL_EXTERNAL (decl) = 1;
10284 TREE_PUBLIC (decl) = 1;
10285
10286 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10287 are the flags assigned by targetm.encode_section_info. */
10288 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10289
10290 return decl;
10291 }
10292
10293 /* Extracts the personality function of DECL and returns the corresponding
10294 libfunc. */
10295
10296 rtx
10297 get_personality_function (tree decl)
10298 {
10299 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10300 enum eh_personality_kind pk;
10301
10302 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10303 if (pk == eh_personality_none)
10304 return NULL;
10305
10306 if (!personality
10307 && pk == eh_personality_any)
10308 personality = lang_hooks.eh_personality ();
10309
10310 if (pk == eh_personality_lang)
10311 gcc_assert (personality != NULL_TREE);
10312
10313 return XEXP (DECL_RTL (personality), 0);
10314 }
10315
10316 #include "gt-expr.h"