]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/expr.c
convert.c (convert_to_real, [...]): Replace calls to build with calls to buildN.
[thirdparty/gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
105
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
108
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int);
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
128 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
134 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
135 struct store_by_pieces *);
136 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
137 static rtx clear_storage_via_libcall (rtx, rtx);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree, int);
147 static rtx var_rtx (tree);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
179 #endif
180
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
186 #endif
187
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
193 #endif
194
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movmem_optab[NUM_MACHINE_MODES];
197
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
200
201 /* These arrays record the insn_code of two different kinds of insns
202 to perform block compares. */
203 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
204 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
205
206 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
207
208 #ifndef SLOW_UNALIGNED_ACCESS
209 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
210 #endif
211 \f
212 /* This is run once per compilation to set up which modes can be used
213 directly in memory and to initialize the block move optab. */
214
215 void
216 init_expr_once (void)
217 {
218 rtx insn, pat;
219 enum machine_mode mode;
220 int num_clobbers;
221 rtx mem, mem1;
222 rtx reg;
223
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
229
230 /* A scratch register we can modify in-place below to avoid
231 useless RTL allocations. */
232 reg = gen_rtx_REG (VOIDmode, -1);
233
234 insn = rtx_alloc (INSN);
235 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
236 PATTERN (insn) = pat;
237
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
240 {
241 int regno;
242
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
246 PUT_MODE (reg, mode);
247
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
250
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
254 regno++)
255 {
256 if (! HARD_REGNO_MODE_OK (regno, mode))
257 continue;
258
259 REGNO (reg) = regno;
260
261 SET_SRC (pat) = mem;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275
276 SET_SRC (pat) = reg;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
280 }
281 }
282
283 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
284
285 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
286 mode = GET_MODE_WIDER_MODE (mode))
287 {
288 enum machine_mode srcmode;
289 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
290 srcmode = GET_MODE_WIDER_MODE (srcmode))
291 {
292 enum insn_code ic;
293
294 ic = can_extend_p (mode, srcmode, 0);
295 if (ic == CODE_FOR_nothing)
296 continue;
297
298 PUT_MODE (mem, srcmode);
299
300 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
301 float_extend_from_mem[mode][srcmode] = true;
302 }
303 }
304 }
305
306 /* This is run at the start of compiling a function. */
307
308 void
309 init_expr (void)
310 {
311 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
312 }
313 \f
314 /* Copy data from FROM to TO, where the machine modes are not the same.
315 Both modes may be integer, or both may be floating.
316 UNSIGNEDP should be nonzero if FROM is an unsigned type.
317 This causes zero-extension instead of sign-extension. */
318
319 void
320 convert_move (rtx to, rtx from, int unsignedp)
321 {
322 enum machine_mode to_mode = GET_MODE (to);
323 enum machine_mode from_mode = GET_MODE (from);
324 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
325 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
326 enum insn_code code;
327 rtx libcall;
328
329 /* rtx code for making an equivalent value. */
330 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
331 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
332
333
334 if (to_real != from_real)
335 abort ();
336
337 /* If the source and destination are already the same, then there's
338 nothing to do. */
339 if (to == from)
340 return;
341
342 /* If FROM is a SUBREG that indicates that we have already done at least
343 the required extension, strip it. We don't handle such SUBREGs as
344 TO here. */
345
346 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
347 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
348 >= GET_MODE_SIZE (to_mode))
349 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
350 from = gen_lowpart (to_mode, from), from_mode = to_mode;
351
352 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
353 abort ();
354
355 if (to_mode == from_mode
356 || (from_mode == VOIDmode && CONSTANT_P (from)))
357 {
358 emit_move_insn (to, from);
359 return;
360 }
361
362 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
363 {
364 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
365 abort ();
366
367 if (VECTOR_MODE_P (to_mode))
368 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
369 else
370 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
371
372 emit_move_insn (to, from);
373 return;
374 }
375
376 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
377 {
378 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
379 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
380 return;
381 }
382
383 if (to_real)
384 {
385 rtx value, insns;
386 convert_optab tab;
387
388 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
389 tab = sext_optab;
390 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
391 tab = trunc_optab;
392 else
393 abort ();
394
395 /* Try converting directly if the insn is supported. */
396
397 code = tab->handlers[to_mode][from_mode].insn_code;
398 if (code != CODE_FOR_nothing)
399 {
400 emit_unop_insn (code, to, from,
401 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
402 return;
403 }
404
405 /* Otherwise use a libcall. */
406 libcall = tab->handlers[to_mode][from_mode].libfunc;
407
408 if (!libcall)
409 /* This conversion is not implemented yet. */
410 abort ();
411
412 start_sequence ();
413 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
414 1, from, from_mode);
415 insns = get_insns ();
416 end_sequence ();
417 emit_libcall_block (insns, to, value,
418 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
419 from)
420 : gen_rtx_FLOAT_EXTEND (to_mode, from));
421 return;
422 }
423
424 /* Handle pointer conversion. */ /* SPEE 900220. */
425 /* Targets are expected to provide conversion insns between PxImode and
426 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
427 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
428 {
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
431
432 if (trunc_optab->handlers[to_mode][full_mode].insn_code
433 == CODE_FOR_nothing)
434 abort ();
435
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
441 }
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 {
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
446
447 if (sext_optab->handlers[full_mode][from_mode].insn_code
448 == CODE_FOR_nothing)
449 abort ();
450
451 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
452 to, from, UNKNOWN);
453 if (to_mode == full_mode)
454 return;
455
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 }
459
460 /* Now both modes are integers. */
461
462 /* Handle expanding beyond a word. */
463 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
464 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
465 {
466 rtx insns;
467 rtx lowpart;
468 rtx fill_value;
469 rtx lowfrom;
470 int i;
471 enum machine_mode lowpart_mode;
472 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473
474 /* Try converting directly if the insn is supported. */
475 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
476 != CODE_FOR_nothing)
477 {
478 /* If FROM is a SUBREG, put it into a register. Do this
479 so that we always generate the same set of insns for
480 better cse'ing; if an intermediate assignment occurred,
481 we won't be doing the operation directly on the SUBREG. */
482 if (optimize > 0 && GET_CODE (from) == SUBREG)
483 from = force_reg (from_mode, from);
484 emit_unop_insn (code, to, from, equiv_code);
485 return;
486 }
487 /* Next, try converting via full word. */
488 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
489 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
490 != CODE_FOR_nothing))
491 {
492 if (REG_P (to))
493 {
494 if (reg_overlap_mentioned_p (to, from))
495 from = force_reg (from_mode, from);
496 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 }
498 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
499 emit_unop_insn (code, to,
500 gen_lowpart (word_mode, to), equiv_code);
501 return;
502 }
503
504 /* No special multiword conversion insn; do it by hand. */
505 start_sequence ();
506
507 /* Since we will turn this into a no conflict block, we must ensure
508 that the source does not overlap the target. */
509
510 if (reg_overlap_mentioned_p (to, from))
511 from = force_reg (from_mode, from);
512
513 /* Get a copy of FROM widened to a word, if necessary. */
514 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
515 lowpart_mode = word_mode;
516 else
517 lowpart_mode = from_mode;
518
519 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520
521 lowpart = gen_lowpart (lowpart_mode, to);
522 emit_move_insn (lowpart, lowfrom);
523
524 /* Compute the value to put in each remaining word. */
525 if (unsignedp)
526 fill_value = const0_rtx;
527 else
528 {
529 #ifdef HAVE_slt
530 if (HAVE_slt
531 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
532 && STORE_FLAG_VALUE == -1)
533 {
534 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
535 lowpart_mode, 0);
536 fill_value = gen_reg_rtx (word_mode);
537 emit_insn (gen_slt (fill_value));
538 }
539 else
540 #endif
541 {
542 fill_value
543 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
544 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
545 NULL_RTX, 0);
546 fill_value = convert_to_mode (word_mode, fill_value, 1);
547 }
548 }
549
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 {
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
555
556 if (subword == 0)
557 abort ();
558
559 if (fill_value != subword)
560 emit_move_insn (subword, fill_value);
561 }
562
563 insns = get_insns ();
564 end_sequence ();
565
566 emit_no_conflict_block (insns, to, from, NULL_RTX,
567 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
568 return;
569 }
570
571 /* Truncating multi-word to a word or less. */
572 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
573 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
574 {
575 if (!((MEM_P (from)
576 && ! MEM_VOLATILE_P (from)
577 && direct_load[(int) to_mode]
578 && ! mode_dependent_address_p (XEXP (from, 0)))
579 || REG_P (from)
580 || GET_CODE (from) == SUBREG))
581 from = force_reg (from_mode, from);
582 convert_move (to, gen_lowpart (word_mode, from), 0);
583 return;
584 }
585
586 /* Now follow all the conversions between integers
587 no more than a word long. */
588
589 /* For truncation, usually we can just refer to FROM in a narrower mode. */
590 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
591 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
592 GET_MODE_BITSIZE (from_mode)))
593 {
594 if (!((MEM_P (from)
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0)))
598 || REG_P (from)
599 || GET_CODE (from) == SUBREG))
600 from = force_reg (from_mode, from);
601 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
602 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
603 from = copy_to_reg (from);
604 emit_move_insn (to, gen_lowpart (to_mode, from));
605 return;
606 }
607
608 /* Handle extension. */
609 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
610 {
611 /* Convert directly if that works. */
612 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
613 != CODE_FOR_nothing)
614 {
615 if (flag_force_mem)
616 from = force_not_mem (from);
617
618 emit_unop_insn (code, to, from, equiv_code);
619 return;
620 }
621 else
622 {
623 enum machine_mode intermediate;
624 rtx tmp;
625 tree shift_amount;
626
627 /* Search for a mode to convert via. */
628 for (intermediate = from_mode; intermediate != VOIDmode;
629 intermediate = GET_MODE_WIDER_MODE (intermediate))
630 if (((can_extend_p (to_mode, intermediate, unsignedp)
631 != CODE_FOR_nothing)
632 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
633 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
634 GET_MODE_BITSIZE (intermediate))))
635 && (can_extend_p (intermediate, from_mode, unsignedp)
636 != CODE_FOR_nothing))
637 {
638 convert_move (to, convert_to_mode (intermediate, from,
639 unsignedp), unsignedp);
640 return;
641 }
642
643 /* No suitable intermediate mode.
644 Generate what we need with shifts. */
645 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
646 - GET_MODE_BITSIZE (from_mode), 0);
647 from = gen_lowpart (to_mode, force_reg (from_mode, from));
648 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
649 to, unsignedp);
650 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
651 to, unsignedp);
652 if (tmp != to)
653 emit_move_insn (to, tmp);
654 return;
655 }
656 }
657
658 /* Support special truncate insns for certain modes. */
659 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 {
661 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
662 to, from, UNKNOWN);
663 return;
664 }
665
666 /* Handle truncation of volatile memrefs, and so on;
667 the things that couldn't be truncated directly,
668 and for which there was no special instruction.
669
670 ??? Code above formerly short-circuited this, for most integer
671 mode pairs, with a force_reg in from_mode followed by a recursive
672 call to this routine. Appears always to have been wrong. */
673 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 {
675 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
676 emit_move_insn (to, temp);
677 return;
678 }
679
680 /* Mode combination is not recognized. */
681 abort ();
682 }
683
684 /* Return an rtx for a value that would result
685 from converting X to mode MODE.
686 Both X and MODE may be floating, or both integer.
687 UNSIGNEDP is nonzero if X is an unsigned value.
688 This can be done by referring to a part of X in place
689 or by copying to a new temporary with conversion. */
690
691 rtx
692 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 {
694 return convert_modes (mode, VOIDmode, x, unsignedp);
695 }
696
697 /* Return an rtx for a value that would result
698 from converting X from mode OLDMODE to mode MODE.
699 Both modes may be floating, or both integer.
700 UNSIGNEDP is nonzero if X is an unsigned value.
701
702 This can be done by referring to a part of X in place
703 or by copying to a new temporary with conversion.
704
705 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
706
707 rtx
708 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
709 {
710 rtx temp;
711
712 /* If FROM is a SUBREG that indicates that we have already done at least
713 the required extension, strip it. */
714
715 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
716 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
717 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
718 x = gen_lowpart (mode, x);
719
720 if (GET_MODE (x) != VOIDmode)
721 oldmode = GET_MODE (x);
722
723 if (mode == oldmode)
724 return x;
725
726 /* There is one case that we must handle specially: If we are converting
727 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
728 we are to interpret the constant as unsigned, gen_lowpart will do
729 the wrong if the constant appears negative. What we want to do is
730 make the high-order word of the constant zero, not all ones. */
731
732 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
734 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 {
736 HOST_WIDE_INT val = INTVAL (x);
737
738 if (oldmode != VOIDmode
739 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 {
741 int width = GET_MODE_BITSIZE (oldmode);
742
743 /* We need to zero extend VAL. */
744 val &= ((HOST_WIDE_INT) 1 << width) - 1;
745 }
746
747 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
748 }
749
750 /* We can do this with a gen_lowpart if both desired and current modes
751 are integer, and this is either a constant integer, a register, or a
752 non-volatile MEM. Except for the constant case where MODE is no
753 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754
755 if ((GET_CODE (x) == CONST_INT
756 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
757 || (GET_MODE_CLASS (mode) == MODE_INT
758 && GET_MODE_CLASS (oldmode) == MODE_INT
759 && (GET_CODE (x) == CONST_DOUBLE
760 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
761 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
762 && direct_load[(int) mode])
763 || (REG_P (x)
764 && (! HARD_REGISTER_P (x)
765 || HARD_REGNO_MODE_OK (REGNO (x), mode))
766 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
767 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 {
769 /* ?? If we don't know OLDMODE, we have to assume here that
770 X does not need sign- or zero-extension. This may not be
771 the case, but it's the best we can do. */
772 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
773 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 {
775 HOST_WIDE_INT val = INTVAL (x);
776 int width = GET_MODE_BITSIZE (oldmode);
777
778 /* We must sign or zero-extend in this case. Start by
779 zero-extending, then sign extend if we need to. */
780 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 if (! unsignedp
782 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
783 val |= (HOST_WIDE_INT) (-1) << width;
784
785 return gen_int_mode (val, mode);
786 }
787
788 return gen_lowpart (mode, x);
789 }
790
791 /* Converting from integer constant into mode is always equivalent to an
792 subreg operation. */
793 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 {
795 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
796 abort ();
797 return simplify_gen_subreg (mode, x, oldmode, 0);
798 }
799
800 temp = gen_reg_rtx (mode);
801 convert_move (temp, x, unsignedp);
802 return temp;
803 }
804 \f
805 /* STORE_MAX_PIECES is the number of bytes at a time that we can
806 store efficiently. Due to internal GCC limitations, this is
807 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
808 for an immediate constant. */
809
810 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
811
812 /* Determine whether the LEN bytes can be moved by using several move
813 instructions. Return nonzero if a call to move_by_pieces should
814 succeed. */
815
816 int
817 can_move_by_pieces (unsigned HOST_WIDE_INT len,
818 unsigned int align ATTRIBUTE_UNUSED)
819 {
820 return MOVE_BY_PIECES_P (len, align);
821 }
822
823 /* Generate several move instructions to copy LEN bytes from block FROM to
824 block TO. (These are MEM rtx's with BLKmode).
825
826 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
827 used to push FROM to the stack.
828
829 ALIGN is maximum stack alignment we can assume.
830
831 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
832 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
833 stpcpy. */
834
835 rtx
836 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
837 unsigned int align, int endp)
838 {
839 struct move_by_pieces data;
840 rtx to_addr, from_addr = XEXP (from, 0);
841 unsigned int max_size = MOVE_MAX_PIECES + 1;
842 enum machine_mode mode = VOIDmode, tmode;
843 enum insn_code icode;
844
845 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
846
847 data.offset = 0;
848 data.from_addr = from_addr;
849 if (to)
850 {
851 to_addr = XEXP (to, 0);
852 data.to = to;
853 data.autinc_to
854 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
855 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
856 data.reverse
857 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
858 }
859 else
860 {
861 to_addr = NULL_RTX;
862 data.to = NULL_RTX;
863 data.autinc_to = 1;
864 #ifdef STACK_GROWS_DOWNWARD
865 data.reverse = 1;
866 #else
867 data.reverse = 0;
868 #endif
869 }
870 data.to_addr = to_addr;
871 data.from = from;
872 data.autinc_from
873 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
874 || GET_CODE (from_addr) == POST_INC
875 || GET_CODE (from_addr) == POST_DEC);
876
877 data.explicit_inc_from = 0;
878 data.explicit_inc_to = 0;
879 if (data.reverse) data.offset = len;
880 data.len = len;
881
882 /* If copying requires more than two move insns,
883 copy addresses to registers (to make displacements shorter)
884 and use post-increment if available. */
885 if (!(data.autinc_from && data.autinc_to)
886 && move_by_pieces_ninsns (len, align) > 2)
887 {
888 /* Find the mode of the largest move... */
889 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
890 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
891 if (GET_MODE_SIZE (tmode) < max_size)
892 mode = tmode;
893
894 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
895 {
896 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
899 }
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 {
902 data.from_addr = copy_addr_to_reg (from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
905 }
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_addr_to_reg (from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 {
910 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
911 data.autinc_to = 1;
912 data.explicit_inc_to = -1;
913 }
914 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
915 {
916 data.to_addr = copy_addr_to_reg (to_addr);
917 data.autinc_to = 1;
918 data.explicit_inc_to = 1;
919 }
920 if (!data.autinc_to && CONSTANT_P (to_addr))
921 data.to_addr = copy_addr_to_reg (to_addr);
922 }
923
924 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
925 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
926 align = MOVE_MAX * BITS_PER_UNIT;
927
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
930
931 while (max_size > 1)
932 {
933 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935 if (GET_MODE_SIZE (tmode) < max_size)
936 mode = tmode;
937
938 if (mode == VOIDmode)
939 break;
940
941 icode = mov_optab->handlers[(int) mode].insn_code;
942 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
943 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
944
945 max_size = GET_MODE_SIZE (mode);
946 }
947
948 /* The code above should have handled everything. */
949 if (data.len > 0)
950 abort ();
951
952 if (endp)
953 {
954 rtx to1;
955
956 if (data.reverse)
957 abort ();
958 if (data.autinc_to)
959 {
960 if (endp == 2)
961 {
962 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
963 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
964 else
965 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
966 -1));
967 }
968 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
969 data.offset);
970 }
971 else
972 {
973 if (endp == 2)
974 --data.offset;
975 to1 = adjust_address (data.to, QImode, data.offset);
976 }
977 return to1;
978 }
979 else
980 return data.to;
981 }
982
983 /* Return number of insns required to move L bytes by pieces.
984 ALIGN (in bits) is maximum alignment we can assume. */
985
986 static unsigned HOST_WIDE_INT
987 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
988 {
989 unsigned HOST_WIDE_INT n_insns = 0;
990 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
991
992 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
993 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
994 align = MOVE_MAX * BITS_PER_UNIT;
995
996 while (max_size > 1)
997 {
998 enum machine_mode mode = VOIDmode, tmode;
999 enum insn_code icode;
1000
1001 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1002 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1003 if (GET_MODE_SIZE (tmode) < max_size)
1004 mode = tmode;
1005
1006 if (mode == VOIDmode)
1007 break;
1008
1009 icode = mov_optab->handlers[(int) mode].insn_code;
1010 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1011 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1012
1013 max_size = GET_MODE_SIZE (mode);
1014 }
1015
1016 if (l)
1017 abort ();
1018 return n_insns;
1019 }
1020
1021 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1022 with move instructions for mode MODE. GENFUN is the gen_... function
1023 to make a move insn for that mode. DATA has all the other info. */
1024
1025 static void
1026 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1027 struct move_by_pieces *data)
1028 {
1029 unsigned int size = GET_MODE_SIZE (mode);
1030 rtx to1 = NULL_RTX, from1;
1031
1032 while (data->len >= size)
1033 {
1034 if (data->reverse)
1035 data->offset -= size;
1036
1037 if (data->to)
1038 {
1039 if (data->autinc_to)
1040 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1041 data->offset);
1042 else
1043 to1 = adjust_address (data->to, mode, data->offset);
1044 }
1045
1046 if (data->autinc_from)
1047 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1048 data->offset);
1049 else
1050 from1 = adjust_address (data->from, mode, data->offset);
1051
1052 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1053 emit_insn (gen_add2_insn (data->to_addr,
1054 GEN_INT (-(HOST_WIDE_INT)size)));
1055 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1056 emit_insn (gen_add2_insn (data->from_addr,
1057 GEN_INT (-(HOST_WIDE_INT)size)));
1058
1059 if (data->to)
1060 emit_insn ((*genfun) (to1, from1));
1061 else
1062 {
1063 #ifdef PUSH_ROUNDING
1064 emit_single_push_insn (mode, from1, NULL);
1065 #else
1066 abort ();
1067 #endif
1068 }
1069
1070 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1071 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1072 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1073 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1074
1075 if (! data->reverse)
1076 data->offset += size;
1077
1078 data->len -= size;
1079 }
1080 }
1081 \f
1082 /* Emit code to move a block Y to a block X. This may be done with
1083 string-move instructions, with multiple scalar move instructions,
1084 or with a library call.
1085
1086 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1087 SIZE is an rtx that says how long they are.
1088 ALIGN is the maximum alignment we can assume they have.
1089 METHOD describes what kind of copy this is, and what mechanisms may be used.
1090
1091 Return the address of the new block, if memcpy is called and returns it,
1092 0 otherwise. */
1093
1094 rtx
1095 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1096 {
1097 bool may_use_call;
1098 rtx retval = 0;
1099 unsigned int align;
1100
1101 switch (method)
1102 {
1103 case BLOCK_OP_NORMAL:
1104 may_use_call = true;
1105 break;
1106
1107 case BLOCK_OP_CALL_PARM:
1108 may_use_call = block_move_libcall_safe_for_call_parm ();
1109
1110 /* Make inhibit_defer_pop nonzero around the library call
1111 to force it to pop the arguments right away. */
1112 NO_DEFER_POP;
1113 break;
1114
1115 case BLOCK_OP_NO_LIBCALL:
1116 may_use_call = false;
1117 break;
1118
1119 default:
1120 abort ();
1121 }
1122
1123 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1124
1125 if (!MEM_P (x))
1126 abort ();
1127 if (!MEM_P (y))
1128 abort ();
1129 if (size == 0)
1130 abort ();
1131
1132 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1133 block copy is more efficient for other large modes, e.g. DCmode. */
1134 x = adjust_address (x, BLKmode, 0);
1135 y = adjust_address (y, BLKmode, 0);
1136
1137 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1138 can be incorrect is coming from __builtin_memcpy. */
1139 if (GET_CODE (size) == CONST_INT)
1140 {
1141 if (INTVAL (size) == 0)
1142 return 0;
1143
1144 x = shallow_copy_rtx (x);
1145 y = shallow_copy_rtx (y);
1146 set_mem_size (x, size);
1147 set_mem_size (y, size);
1148 }
1149
1150 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1151 move_by_pieces (x, y, INTVAL (size), align, 0);
1152 else if (emit_block_move_via_movmem (x, y, size, align))
1153 ;
1154 else if (may_use_call)
1155 retval = emit_block_move_via_libcall (x, y, size);
1156 else
1157 emit_block_move_via_loop (x, y, size, align);
1158
1159 if (method == BLOCK_OP_CALL_PARM)
1160 OK_DEFER_POP;
1161
1162 return retval;
1163 }
1164
1165 /* A subroutine of emit_block_move. Returns true if calling the
1166 block move libcall will not clobber any parameters which may have
1167 already been placed on the stack. */
1168
1169 static bool
1170 block_move_libcall_safe_for_call_parm (void)
1171 {
1172 /* If arguments are pushed on the stack, then they're safe. */
1173 if (PUSH_ARGS)
1174 return true;
1175
1176 /* If registers go on the stack anyway, any argument is sure to clobber
1177 an outgoing argument. */
1178 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1179 {
1180 tree fn = emit_block_move_libcall_fn (false);
1181 (void) fn;
1182 if (REG_PARM_STACK_SPACE (fn) != 0)
1183 return false;
1184 }
1185 #endif
1186
1187 /* If any argument goes in memory, then it might clobber an outgoing
1188 argument. */
1189 {
1190 CUMULATIVE_ARGS args_so_far;
1191 tree fn, arg;
1192
1193 fn = emit_block_move_libcall_fn (false);
1194 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1195
1196 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1197 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1198 {
1199 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1200 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1201 if (!tmp || !REG_P (tmp))
1202 return false;
1203 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1204 NULL_TREE, 1))
1205 return false;
1206 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1207 }
1208 }
1209 return true;
1210 }
1211
1212 /* A subroutine of emit_block_move. Expand a movmem pattern;
1213 return true if successful. */
1214
1215 static bool
1216 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1217 {
1218 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1219 int save_volatile_ok = volatile_ok;
1220 enum machine_mode mode;
1221
1222 /* Since this is a move insn, we don't care about volatility. */
1223 volatile_ok = 1;
1224
1225 /* Try the most limited insn first, because there's no point
1226 including more than one in the machine description unless
1227 the more limited one has some advantage. */
1228
1229 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1230 mode = GET_MODE_WIDER_MODE (mode))
1231 {
1232 enum insn_code code = movmem_optab[(int) mode];
1233 insn_operand_predicate_fn pred;
1234
1235 if (code != CODE_FOR_nothing
1236 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1237 here because if SIZE is less than the mode mask, as it is
1238 returned by the macro, it will definitely be less than the
1239 actual mode mask. */
1240 && ((GET_CODE (size) == CONST_INT
1241 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1242 <= (GET_MODE_MASK (mode) >> 1)))
1243 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1244 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1245 || (*pred) (x, BLKmode))
1246 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1247 || (*pred) (y, BLKmode))
1248 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1249 || (*pred) (opalign, VOIDmode)))
1250 {
1251 rtx op2;
1252 rtx last = get_last_insn ();
1253 rtx pat;
1254
1255 op2 = convert_to_mode (mode, size, 1);
1256 pred = insn_data[(int) code].operand[2].predicate;
1257 if (pred != 0 && ! (*pred) (op2, mode))
1258 op2 = copy_to_mode_reg (mode, op2);
1259
1260 /* ??? When called via emit_block_move_for_call, it'd be
1261 nice if there were some way to inform the backend, so
1262 that it doesn't fail the expansion because it thinks
1263 emitting the libcall would be more efficient. */
1264
1265 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1266 if (pat)
1267 {
1268 emit_insn (pat);
1269 volatile_ok = save_volatile_ok;
1270 return true;
1271 }
1272 else
1273 delete_insns_since (last);
1274 }
1275 }
1276
1277 volatile_ok = save_volatile_ok;
1278 return false;
1279 }
1280
1281 /* A subroutine of emit_block_move. Expand a call to memcpy.
1282 Return the return value from memcpy, 0 otherwise. */
1283
1284 static rtx
1285 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1286 {
1287 rtx dst_addr, src_addr;
1288 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1289 enum machine_mode size_mode;
1290 rtx retval;
1291
1292 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1293 pseudos. We can then place those new pseudos into a VAR_DECL and
1294 use them later. */
1295
1296 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1297 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1298
1299 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1300 src_addr = convert_memory_address (ptr_mode, src_addr);
1301
1302 dst_tree = make_tree (ptr_type_node, dst_addr);
1303 src_tree = make_tree (ptr_type_node, src_addr);
1304
1305 size_mode = TYPE_MODE (sizetype);
1306
1307 size = convert_to_mode (size_mode, size, 1);
1308 size = copy_to_mode_reg (size_mode, size);
1309
1310 /* It is incorrect to use the libcall calling conventions to call
1311 memcpy in this context. This could be a user call to memcpy and
1312 the user may wish to examine the return value from memcpy. For
1313 targets where libcalls and normal calls have different conventions
1314 for returning pointers, we could end up generating incorrect code. */
1315
1316 size_tree = make_tree (sizetype, size);
1317
1318 fn = emit_block_move_libcall_fn (true);
1319 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1320 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1321 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1322
1323 /* Now we have to build up the CALL_EXPR itself. */
1324 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1325 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1326 call_expr, arg_list, NULL_TREE);
1327
1328 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1329
1330 /* If we are initializing a readonly value, show the above call clobbered
1331 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1332 the delay slot scheduler might overlook conflicts and take nasty
1333 decisions. */
1334 if (RTX_UNCHANGING_P (dst))
1335 add_function_usage_to
1336 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1337 gen_rtx_CLOBBER (VOIDmode, dst),
1338 NULL_RTX));
1339
1340 return retval;
1341 }
1342
1343 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1344 for the function we use for block copies. The first time FOR_CALL
1345 is true, we call assemble_external. */
1346
1347 static GTY(()) tree block_move_fn;
1348
1349 void
1350 init_block_move_fn (const char *asmspec)
1351 {
1352 if (!block_move_fn)
1353 {
1354 tree args, fn;
1355
1356 fn = get_identifier ("memcpy");
1357 args = build_function_type_list (ptr_type_node, ptr_type_node,
1358 const_ptr_type_node, sizetype,
1359 NULL_TREE);
1360
1361 fn = build_decl (FUNCTION_DECL, fn, args);
1362 DECL_EXTERNAL (fn) = 1;
1363 TREE_PUBLIC (fn) = 1;
1364 DECL_ARTIFICIAL (fn) = 1;
1365 TREE_NOTHROW (fn) = 1;
1366
1367 block_move_fn = fn;
1368 }
1369
1370 if (asmspec)
1371 {
1372 SET_DECL_RTL (block_move_fn, NULL_RTX);
1373 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1374 }
1375 }
1376
1377 static tree
1378 emit_block_move_libcall_fn (int for_call)
1379 {
1380 static bool emitted_extern;
1381
1382 if (!block_move_fn)
1383 init_block_move_fn (NULL);
1384
1385 if (for_call && !emitted_extern)
1386 {
1387 emitted_extern = true;
1388 make_decl_rtl (block_move_fn, NULL);
1389 assemble_external (block_move_fn);
1390 }
1391
1392 return block_move_fn;
1393 }
1394
1395 /* A subroutine of emit_block_move. Copy the data via an explicit
1396 loop. This is used only when libcalls are forbidden. */
1397 /* ??? It'd be nice to copy in hunks larger than QImode. */
1398
1399 static void
1400 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1401 unsigned int align ATTRIBUTE_UNUSED)
1402 {
1403 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1404 enum machine_mode iter_mode;
1405
1406 iter_mode = GET_MODE (size);
1407 if (iter_mode == VOIDmode)
1408 iter_mode = word_mode;
1409
1410 top_label = gen_label_rtx ();
1411 cmp_label = gen_label_rtx ();
1412 iter = gen_reg_rtx (iter_mode);
1413
1414 emit_move_insn (iter, const0_rtx);
1415
1416 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1417 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1418 do_pending_stack_adjust ();
1419
1420 emit_jump (cmp_label);
1421 emit_label (top_label);
1422
1423 tmp = convert_modes (Pmode, iter_mode, iter, true);
1424 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1425 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1426 x = change_address (x, QImode, x_addr);
1427 y = change_address (y, QImode, y_addr);
1428
1429 emit_move_insn (x, y);
1430
1431 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1432 true, OPTAB_LIB_WIDEN);
1433 if (tmp != iter)
1434 emit_move_insn (iter, tmp);
1435
1436 emit_label (cmp_label);
1437
1438 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1439 true, top_label);
1440 }
1441 \f
1442 /* Copy all or part of a value X into registers starting at REGNO.
1443 The number of registers to be filled is NREGS. */
1444
1445 void
1446 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1447 {
1448 int i;
1449 #ifdef HAVE_load_multiple
1450 rtx pat;
1451 rtx last;
1452 #endif
1453
1454 if (nregs == 0)
1455 return;
1456
1457 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1458 x = validize_mem (force_const_mem (mode, x));
1459
1460 /* See if the machine can do this with a load multiple insn. */
1461 #ifdef HAVE_load_multiple
1462 if (HAVE_load_multiple)
1463 {
1464 last = get_last_insn ();
1465 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1466 GEN_INT (nregs));
1467 if (pat)
1468 {
1469 emit_insn (pat);
1470 return;
1471 }
1472 else
1473 delete_insns_since (last);
1474 }
1475 #endif
1476
1477 for (i = 0; i < nregs; i++)
1478 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1479 operand_subword_force (x, i, mode));
1480 }
1481
1482 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1483 The number of registers to be filled is NREGS. */
1484
1485 void
1486 move_block_from_reg (int regno, rtx x, int nregs)
1487 {
1488 int i;
1489
1490 if (nregs == 0)
1491 return;
1492
1493 /* See if the machine can do this with a store multiple insn. */
1494 #ifdef HAVE_store_multiple
1495 if (HAVE_store_multiple)
1496 {
1497 rtx last = get_last_insn ();
1498 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1499 GEN_INT (nregs));
1500 if (pat)
1501 {
1502 emit_insn (pat);
1503 return;
1504 }
1505 else
1506 delete_insns_since (last);
1507 }
1508 #endif
1509
1510 for (i = 0; i < nregs; i++)
1511 {
1512 rtx tem = operand_subword (x, i, 1, BLKmode);
1513
1514 if (tem == 0)
1515 abort ();
1516
1517 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1518 }
1519 }
1520
1521 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1522 ORIG, where ORIG is a non-consecutive group of registers represented by
1523 a PARALLEL. The clone is identical to the original except in that the
1524 original set of registers is replaced by a new set of pseudo registers.
1525 The new set has the same modes as the original set. */
1526
1527 rtx
1528 gen_group_rtx (rtx orig)
1529 {
1530 int i, length;
1531 rtx *tmps;
1532
1533 if (GET_CODE (orig) != PARALLEL)
1534 abort ();
1535
1536 length = XVECLEN (orig, 0);
1537 tmps = alloca (sizeof (rtx) * length);
1538
1539 /* Skip a NULL entry in first slot. */
1540 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1541
1542 if (i)
1543 tmps[0] = 0;
1544
1545 for (; i < length; i++)
1546 {
1547 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1548 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1549
1550 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1551 }
1552
1553 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1554 }
1555
1556 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1557 where DST is non-consecutive registers represented by a PARALLEL.
1558 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1559 if not known. */
1560
1561 void
1562 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1563 {
1564 rtx *tmps, src;
1565 int start, i;
1566
1567 if (GET_CODE (dst) != PARALLEL)
1568 abort ();
1569
1570 /* Check for a NULL entry, used to indicate that the parameter goes
1571 both on the stack and in registers. */
1572 if (XEXP (XVECEXP (dst, 0, 0), 0))
1573 start = 0;
1574 else
1575 start = 1;
1576
1577 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1578
1579 /* Process the pieces. */
1580 for (i = start; i < XVECLEN (dst, 0); i++)
1581 {
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1583 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1584 unsigned int bytelen = GET_MODE_SIZE (mode);
1585 int shift = 0;
1586
1587 /* Handle trailing fragments that run over the size of the struct. */
1588 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1589 {
1590 /* Arrange to shift the fragment to where it belongs.
1591 extract_bit_field loads to the lsb of the reg. */
1592 if (
1593 #ifdef BLOCK_REG_PADDING
1594 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1595 == (BYTES_BIG_ENDIAN ? upward : downward)
1596 #else
1597 BYTES_BIG_ENDIAN
1598 #endif
1599 )
1600 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1601 bytelen = ssize - bytepos;
1602 if (bytelen <= 0)
1603 abort ();
1604 }
1605
1606 /* If we won't be loading directly from memory, protect the real source
1607 from strange tricks we might play; but make sure that the source can
1608 be loaded directly into the destination. */
1609 src = orig_src;
1610 if (!MEM_P (orig_src)
1611 && (!CONSTANT_P (orig_src)
1612 || (GET_MODE (orig_src) != mode
1613 && GET_MODE (orig_src) != VOIDmode)))
1614 {
1615 if (GET_MODE (orig_src) == VOIDmode)
1616 src = gen_reg_rtx (mode);
1617 else
1618 src = gen_reg_rtx (GET_MODE (orig_src));
1619
1620 emit_move_insn (src, orig_src);
1621 }
1622
1623 /* Optimize the access just a bit. */
1624 if (MEM_P (src)
1625 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1626 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1627 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1628 && bytelen == GET_MODE_SIZE (mode))
1629 {
1630 tmps[i] = gen_reg_rtx (mode);
1631 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1632 }
1633 else if (GET_CODE (src) == CONCAT)
1634 {
1635 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1636 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1637
1638 if ((bytepos == 0 && bytelen == slen0)
1639 || (bytepos != 0 && bytepos + bytelen <= slen))
1640 {
1641 /* The following assumes that the concatenated objects all
1642 have the same size. In this case, a simple calculation
1643 can be used to determine the object and the bit field
1644 to be extracted. */
1645 tmps[i] = XEXP (src, bytepos / slen0);
1646 if (! CONSTANT_P (tmps[i])
1647 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1648 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1649 (bytepos % slen0) * BITS_PER_UNIT,
1650 1, NULL_RTX, mode, mode);
1651 }
1652 else if (bytepos == 0)
1653 {
1654 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1655 emit_move_insn (mem, src);
1656 tmps[i] = adjust_address (mem, mode, 0);
1657 }
1658 else
1659 abort ();
1660 }
1661 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1662 SIMD register, which is currently broken. While we get GCC
1663 to emit proper RTL for these cases, let's dump to memory. */
1664 else if (VECTOR_MODE_P (GET_MODE (dst))
1665 && REG_P (src))
1666 {
1667 int slen = GET_MODE_SIZE (GET_MODE (src));
1668 rtx mem;
1669
1670 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1671 emit_move_insn (mem, src);
1672 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1673 }
1674 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1675 && XVECLEN (dst, 0) > 1)
1676 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1677 else if (CONSTANT_P (src)
1678 || (REG_P (src) && GET_MODE (src) == mode))
1679 tmps[i] = src;
1680 else
1681 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1682 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1683 mode, mode);
1684
1685 if (shift)
1686 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1687 build_int_2 (shift, 0), tmps[i], 0);
1688 }
1689
1690 /* Copy the extracted pieces into the proper (probable) hard regs. */
1691 for (i = start; i < XVECLEN (dst, 0); i++)
1692 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1693 }
1694
1695 /* Emit code to move a block SRC to block DST, where SRC and DST are
1696 non-consecutive groups of registers, each represented by a PARALLEL. */
1697
1698 void
1699 emit_group_move (rtx dst, rtx src)
1700 {
1701 int i;
1702
1703 if (GET_CODE (src) != PARALLEL
1704 || GET_CODE (dst) != PARALLEL
1705 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1706 abort ();
1707
1708 /* Skip first entry if NULL. */
1709 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1710 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1711 XEXP (XVECEXP (src, 0, i), 0));
1712 }
1713
1714 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1715 where SRC is non-consecutive registers represented by a PARALLEL.
1716 SSIZE represents the total size of block ORIG_DST, or -1 if not
1717 known. */
1718
1719 void
1720 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1721 {
1722 rtx *tmps, dst;
1723 int start, i;
1724
1725 if (GET_CODE (src) != PARALLEL)
1726 abort ();
1727
1728 /* Check for a NULL entry, used to indicate that the parameter goes
1729 both on the stack and in registers. */
1730 if (XEXP (XVECEXP (src, 0, 0), 0))
1731 start = 0;
1732 else
1733 start = 1;
1734
1735 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1736
1737 /* Copy the (probable) hard regs into pseudos. */
1738 for (i = start; i < XVECLEN (src, 0); i++)
1739 {
1740 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1741 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1742 emit_move_insn (tmps[i], reg);
1743 }
1744
1745 /* If we won't be storing directly into memory, protect the real destination
1746 from strange tricks we might play. */
1747 dst = orig_dst;
1748 if (GET_CODE (dst) == PARALLEL)
1749 {
1750 rtx temp;
1751
1752 /* We can get a PARALLEL dst if there is a conditional expression in
1753 a return statement. In that case, the dst and src are the same,
1754 so no action is necessary. */
1755 if (rtx_equal_p (dst, src))
1756 return;
1757
1758 /* It is unclear if we can ever reach here, but we may as well handle
1759 it. Allocate a temporary, and split this into a store/load to/from
1760 the temporary. */
1761
1762 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1763 emit_group_store (temp, src, type, ssize);
1764 emit_group_load (dst, temp, type, ssize);
1765 return;
1766 }
1767 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1768 {
1769 dst = gen_reg_rtx (GET_MODE (orig_dst));
1770 /* Make life a bit easier for combine. */
1771 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1772 }
1773
1774 /* Process the pieces. */
1775 for (i = start; i < XVECLEN (src, 0); i++)
1776 {
1777 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1778 enum machine_mode mode = GET_MODE (tmps[i]);
1779 unsigned int bytelen = GET_MODE_SIZE (mode);
1780 rtx dest = dst;
1781
1782 /* Handle trailing fragments that run over the size of the struct. */
1783 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1784 {
1785 /* store_bit_field always takes its value from the lsb.
1786 Move the fragment to the lsb if it's not already there. */
1787 if (
1788 #ifdef BLOCK_REG_PADDING
1789 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1790 == (BYTES_BIG_ENDIAN ? upward : downward)
1791 #else
1792 BYTES_BIG_ENDIAN
1793 #endif
1794 )
1795 {
1796 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1797 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1798 build_int_2 (shift, 0), tmps[i], 0);
1799 }
1800 bytelen = ssize - bytepos;
1801 }
1802
1803 if (GET_CODE (dst) == CONCAT)
1804 {
1805 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1806 dest = XEXP (dst, 0);
1807 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1808 {
1809 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1810 dest = XEXP (dst, 1);
1811 }
1812 else if (bytepos == 0 && XVECLEN (src, 0))
1813 {
1814 dest = assign_stack_temp (GET_MODE (dest),
1815 GET_MODE_SIZE (GET_MODE (dest)), 0);
1816 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1817 tmps[i]);
1818 dst = dest;
1819 break;
1820 }
1821 else
1822 abort ();
1823 }
1824
1825 /* Optimize the access just a bit. */
1826 if (MEM_P (dest)
1827 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1828 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1829 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1830 && bytelen == GET_MODE_SIZE (mode))
1831 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1832 else
1833 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1834 mode, tmps[i]);
1835 }
1836
1837 /* Copy from the pseudo into the (probable) hard reg. */
1838 if (orig_dst != dst)
1839 emit_move_insn (orig_dst, dst);
1840 }
1841
1842 /* Generate code to copy a BLKmode object of TYPE out of a
1843 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1844 is null, a stack temporary is created. TGTBLK is returned.
1845
1846 The purpose of this routine is to handle functions that return
1847 BLKmode structures in registers. Some machines (the PA for example)
1848 want to return all small structures in registers regardless of the
1849 structure's alignment. */
1850
1851 rtx
1852 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1853 {
1854 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1855 rtx src = NULL, dst = NULL;
1856 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1857 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1858
1859 if (tgtblk == 0)
1860 {
1861 tgtblk = assign_temp (build_qualified_type (type,
1862 (TYPE_QUALS (type)
1863 | TYPE_QUAL_CONST)),
1864 0, 1, 1);
1865 preserve_temp_slots (tgtblk);
1866 }
1867
1868 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1869 into a new pseudo which is a full word. */
1870
1871 if (GET_MODE (srcreg) != BLKmode
1872 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1873 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1874
1875 /* If the structure doesn't take up a whole number of words, see whether
1876 SRCREG is padded on the left or on the right. If it's on the left,
1877 set PADDING_CORRECTION to the number of bits to skip.
1878
1879 In most ABIs, the structure will be returned at the least end of
1880 the register, which translates to right padding on little-endian
1881 targets and left padding on big-endian targets. The opposite
1882 holds if the structure is returned at the most significant
1883 end of the register. */
1884 if (bytes % UNITS_PER_WORD != 0
1885 && (targetm.calls.return_in_msb (type)
1886 ? !BYTES_BIG_ENDIAN
1887 : BYTES_BIG_ENDIAN))
1888 padding_correction
1889 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
1890
1891 /* Copy the structure BITSIZE bites at a time.
1892
1893 We could probably emit more efficient code for machines which do not use
1894 strict alignment, but it doesn't seem worth the effort at the current
1895 time. */
1896 for (bitpos = 0, xbitpos = padding_correction;
1897 bitpos < bytes * BITS_PER_UNIT;
1898 bitpos += bitsize, xbitpos += bitsize)
1899 {
1900 /* We need a new source operand each time xbitpos is on a
1901 word boundary and when xbitpos == padding_correction
1902 (the first time through). */
1903 if (xbitpos % BITS_PER_WORD == 0
1904 || xbitpos == padding_correction)
1905 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1906 GET_MODE (srcreg));
1907
1908 /* We need a new destination operand each time bitpos is on
1909 a word boundary. */
1910 if (bitpos % BITS_PER_WORD == 0)
1911 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1912
1913 /* Use xbitpos for the source extraction (right justified) and
1914 xbitpos for the destination store (left justified). */
1915 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1916 extract_bit_field (src, bitsize,
1917 xbitpos % BITS_PER_WORD, 1,
1918 NULL_RTX, word_mode, word_mode));
1919 }
1920
1921 return tgtblk;
1922 }
1923
1924 /* Add a USE expression for REG to the (possibly empty) list pointed
1925 to by CALL_FUSAGE. REG must denote a hard register. */
1926
1927 void
1928 use_reg (rtx *call_fusage, rtx reg)
1929 {
1930 if (!REG_P (reg)
1931 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1932 abort ();
1933
1934 *call_fusage
1935 = gen_rtx_EXPR_LIST (VOIDmode,
1936 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1937 }
1938
1939 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1940 starting at REGNO. All of these registers must be hard registers. */
1941
1942 void
1943 use_regs (rtx *call_fusage, int regno, int nregs)
1944 {
1945 int i;
1946
1947 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1948 abort ();
1949
1950 for (i = 0; i < nregs; i++)
1951 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1952 }
1953
1954 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1955 PARALLEL REGS. This is for calls that pass values in multiple
1956 non-contiguous locations. The Irix 6 ABI has examples of this. */
1957
1958 void
1959 use_group_regs (rtx *call_fusage, rtx regs)
1960 {
1961 int i;
1962
1963 for (i = 0; i < XVECLEN (regs, 0); i++)
1964 {
1965 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1966
1967 /* A NULL entry means the parameter goes both on the stack and in
1968 registers. This can also be a MEM for targets that pass values
1969 partially on the stack and partially in registers. */
1970 if (reg != 0 && REG_P (reg))
1971 use_reg (call_fusage, reg);
1972 }
1973 }
1974 \f
1975
1976 /* Determine whether the LEN bytes generated by CONSTFUN can be
1977 stored to memory using several move instructions. CONSTFUNDATA is
1978 a pointer which will be passed as argument in every CONSTFUN call.
1979 ALIGN is maximum alignment we can assume. Return nonzero if a
1980 call to store_by_pieces should succeed. */
1981
1982 int
1983 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1984 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1985 void *constfundata, unsigned int align)
1986 {
1987 unsigned HOST_WIDE_INT max_size, l;
1988 HOST_WIDE_INT offset = 0;
1989 enum machine_mode mode, tmode;
1990 enum insn_code icode;
1991 int reverse;
1992 rtx cst;
1993
1994 if (len == 0)
1995 return 1;
1996
1997 if (! STORE_BY_PIECES_P (len, align))
1998 return 0;
1999
2000 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2001 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2002 align = MOVE_MAX * BITS_PER_UNIT;
2003
2004 /* We would first store what we can in the largest integer mode, then go to
2005 successively smaller modes. */
2006
2007 for (reverse = 0;
2008 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2009 reverse++)
2010 {
2011 l = len;
2012 mode = VOIDmode;
2013 max_size = STORE_MAX_PIECES + 1;
2014 while (max_size > 1)
2015 {
2016 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2017 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2018 if (GET_MODE_SIZE (tmode) < max_size)
2019 mode = tmode;
2020
2021 if (mode == VOIDmode)
2022 break;
2023
2024 icode = mov_optab->handlers[(int) mode].insn_code;
2025 if (icode != CODE_FOR_nothing
2026 && align >= GET_MODE_ALIGNMENT (mode))
2027 {
2028 unsigned int size = GET_MODE_SIZE (mode);
2029
2030 while (l >= size)
2031 {
2032 if (reverse)
2033 offset -= size;
2034
2035 cst = (*constfun) (constfundata, offset, mode);
2036 if (!LEGITIMATE_CONSTANT_P (cst))
2037 return 0;
2038
2039 if (!reverse)
2040 offset += size;
2041
2042 l -= size;
2043 }
2044 }
2045
2046 max_size = GET_MODE_SIZE (mode);
2047 }
2048
2049 /* The code above should have handled everything. */
2050 if (l != 0)
2051 abort ();
2052 }
2053
2054 return 1;
2055 }
2056
2057 /* Generate several move instructions to store LEN bytes generated by
2058 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2059 pointer which will be passed as argument in every CONSTFUN call.
2060 ALIGN is maximum alignment we can assume.
2061 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2062 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2063 stpcpy. */
2064
2065 rtx
2066 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2067 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2068 void *constfundata, unsigned int align, int endp)
2069 {
2070 struct store_by_pieces data;
2071
2072 if (len == 0)
2073 {
2074 if (endp == 2)
2075 abort ();
2076 return to;
2077 }
2078
2079 if (! STORE_BY_PIECES_P (len, align))
2080 abort ();
2081 data.constfun = constfun;
2082 data.constfundata = constfundata;
2083 data.len = len;
2084 data.to = to;
2085 store_by_pieces_1 (&data, align);
2086 if (endp)
2087 {
2088 rtx to1;
2089
2090 if (data.reverse)
2091 abort ();
2092 if (data.autinc_to)
2093 {
2094 if (endp == 2)
2095 {
2096 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2097 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2098 else
2099 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2100 -1));
2101 }
2102 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2103 data.offset);
2104 }
2105 else
2106 {
2107 if (endp == 2)
2108 --data.offset;
2109 to1 = adjust_address (data.to, QImode, data.offset);
2110 }
2111 return to1;
2112 }
2113 else
2114 return data.to;
2115 }
2116
2117 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2118 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2119
2120 static void
2121 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2122 {
2123 struct store_by_pieces data;
2124
2125 if (len == 0)
2126 return;
2127
2128 data.constfun = clear_by_pieces_1;
2129 data.constfundata = NULL;
2130 data.len = len;
2131 data.to = to;
2132 store_by_pieces_1 (&data, align);
2133 }
2134
2135 /* Callback routine for clear_by_pieces.
2136 Return const0_rtx unconditionally. */
2137
2138 static rtx
2139 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2140 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2141 enum machine_mode mode ATTRIBUTE_UNUSED)
2142 {
2143 return const0_rtx;
2144 }
2145
2146 /* Subroutine of clear_by_pieces and store_by_pieces.
2147 Generate several move instructions to store LEN bytes of block TO. (A MEM
2148 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2149
2150 static void
2151 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2152 unsigned int align ATTRIBUTE_UNUSED)
2153 {
2154 rtx to_addr = XEXP (data->to, 0);
2155 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2156 enum machine_mode mode = VOIDmode, tmode;
2157 enum insn_code icode;
2158
2159 data->offset = 0;
2160 data->to_addr = to_addr;
2161 data->autinc_to
2162 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2163 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2164
2165 data->explicit_inc_to = 0;
2166 data->reverse
2167 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2168 if (data->reverse)
2169 data->offset = data->len;
2170
2171 /* If storing requires more than two move insns,
2172 copy addresses to registers (to make displacements shorter)
2173 and use post-increment if available. */
2174 if (!data->autinc_to
2175 && move_by_pieces_ninsns (data->len, align) > 2)
2176 {
2177 /* Determine the main mode we'll be using. */
2178 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2179 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2180 if (GET_MODE_SIZE (tmode) < max_size)
2181 mode = tmode;
2182
2183 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2184 {
2185 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2186 data->autinc_to = 1;
2187 data->explicit_inc_to = -1;
2188 }
2189
2190 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2191 && ! data->autinc_to)
2192 {
2193 data->to_addr = copy_addr_to_reg (to_addr);
2194 data->autinc_to = 1;
2195 data->explicit_inc_to = 1;
2196 }
2197
2198 if ( !data->autinc_to && CONSTANT_P (to_addr))
2199 data->to_addr = copy_addr_to_reg (to_addr);
2200 }
2201
2202 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2203 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2204 align = MOVE_MAX * BITS_PER_UNIT;
2205
2206 /* First store what we can in the largest integer mode, then go to
2207 successively smaller modes. */
2208
2209 while (max_size > 1)
2210 {
2211 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2212 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) < max_size)
2214 mode = tmode;
2215
2216 if (mode == VOIDmode)
2217 break;
2218
2219 icode = mov_optab->handlers[(int) mode].insn_code;
2220 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2221 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2222
2223 max_size = GET_MODE_SIZE (mode);
2224 }
2225
2226 /* The code above should have handled everything. */
2227 if (data->len != 0)
2228 abort ();
2229 }
2230
2231 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2232 with move instructions for mode MODE. GENFUN is the gen_... function
2233 to make a move insn for that mode. DATA has all the other info. */
2234
2235 static void
2236 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2237 struct store_by_pieces *data)
2238 {
2239 unsigned int size = GET_MODE_SIZE (mode);
2240 rtx to1, cst;
2241
2242 while (data->len >= size)
2243 {
2244 if (data->reverse)
2245 data->offset -= size;
2246
2247 if (data->autinc_to)
2248 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2249 data->offset);
2250 else
2251 to1 = adjust_address (data->to, mode, data->offset);
2252
2253 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2254 emit_insn (gen_add2_insn (data->to_addr,
2255 GEN_INT (-(HOST_WIDE_INT) size)));
2256
2257 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2258 emit_insn ((*genfun) (to1, cst));
2259
2260 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2261 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2262
2263 if (! data->reverse)
2264 data->offset += size;
2265
2266 data->len -= size;
2267 }
2268 }
2269 \f
2270 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2271 its length in bytes. */
2272
2273 rtx
2274 clear_storage (rtx object, rtx size)
2275 {
2276 rtx retval = 0;
2277 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2278 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2279
2280 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2281 just move a zero. Otherwise, do this a piece at a time. */
2282 if (GET_MODE (object) != BLKmode
2283 && GET_CODE (size) == CONST_INT
2284 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2285 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2286 else
2287 {
2288 if (size == const0_rtx)
2289 ;
2290 else if (GET_CODE (size) == CONST_INT
2291 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2292 clear_by_pieces (object, INTVAL (size), align);
2293 else if (clear_storage_via_clrmem (object, size, align))
2294 ;
2295 else
2296 retval = clear_storage_via_libcall (object, size);
2297 }
2298
2299 return retval;
2300 }
2301
2302 /* A subroutine of clear_storage. Expand a clrmem pattern;
2303 return true if successful. */
2304
2305 static bool
2306 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2307 {
2308 /* Try the most limited insn first, because there's no point
2309 including more than one in the machine description unless
2310 the more limited one has some advantage. */
2311
2312 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2313 enum machine_mode mode;
2314
2315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2317 {
2318 enum insn_code code = clrmem_optab[(int) mode];
2319 insn_operand_predicate_fn pred;
2320
2321 if (code != CODE_FOR_nothing
2322 /* We don't need MODE to be narrower than
2323 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2324 the mode mask, as it is returned by the macro, it will
2325 definitely be less than the actual mode mask. */
2326 && ((GET_CODE (size) == CONST_INT
2327 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2328 <= (GET_MODE_MASK (mode) >> 1)))
2329 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2330 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2331 || (*pred) (object, BLKmode))
2332 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2333 || (*pred) (opalign, VOIDmode)))
2334 {
2335 rtx op1;
2336 rtx last = get_last_insn ();
2337 rtx pat;
2338
2339 op1 = convert_to_mode (mode, size, 1);
2340 pred = insn_data[(int) code].operand[1].predicate;
2341 if (pred != 0 && ! (*pred) (op1, mode))
2342 op1 = copy_to_mode_reg (mode, op1);
2343
2344 pat = GEN_FCN ((int) code) (object, op1, opalign);
2345 if (pat)
2346 {
2347 emit_insn (pat);
2348 return true;
2349 }
2350 else
2351 delete_insns_since (last);
2352 }
2353 }
2354
2355 return false;
2356 }
2357
2358 /* A subroutine of clear_storage. Expand a call to memset.
2359 Return the return value of memset, 0 otherwise. */
2360
2361 static rtx
2362 clear_storage_via_libcall (rtx object, rtx size)
2363 {
2364 tree call_expr, arg_list, fn, object_tree, size_tree;
2365 enum machine_mode size_mode;
2366 rtx retval;
2367
2368 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2369 place those into new pseudos into a VAR_DECL and use them later. */
2370
2371 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2372
2373 size_mode = TYPE_MODE (sizetype);
2374 size = convert_to_mode (size_mode, size, 1);
2375 size = copy_to_mode_reg (size_mode, size);
2376
2377 /* It is incorrect to use the libcall calling conventions to call
2378 memset in this context. This could be a user call to memset and
2379 the user may wish to examine the return value from memset. For
2380 targets where libcalls and normal calls have different conventions
2381 for returning pointers, we could end up generating incorrect code. */
2382
2383 object_tree = make_tree (ptr_type_node, object);
2384 size_tree = make_tree (sizetype, size);
2385
2386 fn = clear_storage_libcall_fn (true);
2387 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2388 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2389 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2390
2391 /* Now we have to build up the CALL_EXPR itself. */
2392 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2393 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2394 call_expr, arg_list, NULL_TREE);
2395
2396 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2397
2398 /* If we are initializing a readonly value, show the above call
2399 clobbered it. Otherwise, a load from it may erroneously be
2400 hoisted from a loop. */
2401 if (RTX_UNCHANGING_P (object))
2402 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2403
2404 return retval;
2405 }
2406
2407 /* A subroutine of clear_storage_via_libcall. Create the tree node
2408 for the function we use for block clears. The first time FOR_CALL
2409 is true, we call assemble_external. */
2410
2411 static GTY(()) tree block_clear_fn;
2412
2413 void
2414 init_block_clear_fn (const char *asmspec)
2415 {
2416 if (!block_clear_fn)
2417 {
2418 tree fn, args;
2419
2420 fn = get_identifier ("memset");
2421 args = build_function_type_list (ptr_type_node, ptr_type_node,
2422 integer_type_node, sizetype,
2423 NULL_TREE);
2424
2425 fn = build_decl (FUNCTION_DECL, fn, args);
2426 DECL_EXTERNAL (fn) = 1;
2427 TREE_PUBLIC (fn) = 1;
2428 DECL_ARTIFICIAL (fn) = 1;
2429 TREE_NOTHROW (fn) = 1;
2430
2431 block_clear_fn = fn;
2432 }
2433
2434 if (asmspec)
2435 {
2436 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2437 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2438 }
2439 }
2440
2441 static tree
2442 clear_storage_libcall_fn (int for_call)
2443 {
2444 static bool emitted_extern;
2445
2446 if (!block_clear_fn)
2447 init_block_clear_fn (NULL);
2448
2449 if (for_call && !emitted_extern)
2450 {
2451 emitted_extern = true;
2452 make_decl_rtl (block_clear_fn, NULL);
2453 assemble_external (block_clear_fn);
2454 }
2455
2456 return block_clear_fn;
2457 }
2458 \f
2459 /* Generate code to copy Y into X.
2460 Both Y and X must have the same mode, except that
2461 Y can be a constant with VOIDmode.
2462 This mode cannot be BLKmode; use emit_block_move for that.
2463
2464 Return the last instruction emitted. */
2465
2466 rtx
2467 emit_move_insn (rtx x, rtx y)
2468 {
2469 enum machine_mode mode = GET_MODE (x);
2470 rtx y_cst = NULL_RTX;
2471 rtx last_insn, set;
2472
2473 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2474 abort ();
2475
2476 if (CONSTANT_P (y))
2477 {
2478 if (optimize
2479 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2480 && (last_insn = compress_float_constant (x, y)))
2481 return last_insn;
2482
2483 y_cst = y;
2484
2485 if (!LEGITIMATE_CONSTANT_P (y))
2486 {
2487 y = force_const_mem (mode, y);
2488
2489 /* If the target's cannot_force_const_mem prevented the spill,
2490 assume that the target's move expanders will also take care
2491 of the non-legitimate constant. */
2492 if (!y)
2493 y = y_cst;
2494 }
2495 }
2496
2497 /* If X or Y are memory references, verify that their addresses are valid
2498 for the machine. */
2499 if (MEM_P (x)
2500 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2501 && ! push_operand (x, GET_MODE (x)))
2502 || (flag_force_addr
2503 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2504 x = validize_mem (x);
2505
2506 if (MEM_P (y)
2507 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2508 || (flag_force_addr
2509 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2510 y = validize_mem (y);
2511
2512 if (mode == BLKmode)
2513 abort ();
2514
2515 last_insn = emit_move_insn_1 (x, y);
2516
2517 if (y_cst && REG_P (x)
2518 && (set = single_set (last_insn)) != NULL_RTX
2519 && SET_DEST (set) == x
2520 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2521 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2522
2523 return last_insn;
2524 }
2525
2526 /* Low level part of emit_move_insn.
2527 Called just like emit_move_insn, but assumes X and Y
2528 are basically valid. */
2529
2530 rtx
2531 emit_move_insn_1 (rtx x, rtx y)
2532 {
2533 enum machine_mode mode = GET_MODE (x);
2534 enum machine_mode submode;
2535 enum mode_class class = GET_MODE_CLASS (mode);
2536
2537 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2538 abort ();
2539
2540 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2541 return
2542 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2543
2544 /* Expand complex moves by moving real part and imag part, if possible. */
2545 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2546 && BLKmode != (submode = GET_MODE_INNER (mode))
2547 && (mov_optab->handlers[(int) submode].insn_code
2548 != CODE_FOR_nothing))
2549 {
2550 /* Don't split destination if it is a stack push. */
2551 int stack = push_operand (x, GET_MODE (x));
2552
2553 #ifdef PUSH_ROUNDING
2554 /* In case we output to the stack, but the size is smaller than the
2555 machine can push exactly, we need to use move instructions. */
2556 if (stack
2557 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2558 != GET_MODE_SIZE (submode)))
2559 {
2560 rtx temp;
2561 HOST_WIDE_INT offset1, offset2;
2562
2563 /* Do not use anti_adjust_stack, since we don't want to update
2564 stack_pointer_delta. */
2565 temp = expand_binop (Pmode,
2566 #ifdef STACK_GROWS_DOWNWARD
2567 sub_optab,
2568 #else
2569 add_optab,
2570 #endif
2571 stack_pointer_rtx,
2572 GEN_INT
2573 (PUSH_ROUNDING
2574 (GET_MODE_SIZE (GET_MODE (x)))),
2575 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2576
2577 if (temp != stack_pointer_rtx)
2578 emit_move_insn (stack_pointer_rtx, temp);
2579
2580 #ifdef STACK_GROWS_DOWNWARD
2581 offset1 = 0;
2582 offset2 = GET_MODE_SIZE (submode);
2583 #else
2584 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2585 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2586 + GET_MODE_SIZE (submode));
2587 #endif
2588
2589 emit_move_insn (change_address (x, submode,
2590 gen_rtx_PLUS (Pmode,
2591 stack_pointer_rtx,
2592 GEN_INT (offset1))),
2593 gen_realpart (submode, y));
2594 emit_move_insn (change_address (x, submode,
2595 gen_rtx_PLUS (Pmode,
2596 stack_pointer_rtx,
2597 GEN_INT (offset2))),
2598 gen_imagpart (submode, y));
2599 }
2600 else
2601 #endif
2602 /* If this is a stack, push the highpart first, so it
2603 will be in the argument order.
2604
2605 In that case, change_address is used only to convert
2606 the mode, not to change the address. */
2607 if (stack)
2608 {
2609 /* Note that the real part always precedes the imag part in memory
2610 regardless of machine's endianness. */
2611 #ifdef STACK_GROWS_DOWNWARD
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
2614 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2615 gen_realpart (submode, y));
2616 #else
2617 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2618 gen_realpart (submode, y));
2619 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2620 gen_imagpart (submode, y));
2621 #endif
2622 }
2623 else
2624 {
2625 rtx realpart_x, realpart_y;
2626 rtx imagpart_x, imagpart_y;
2627
2628 /* If this is a complex value with each part being smaller than a
2629 word, the usual calling sequence will likely pack the pieces into
2630 a single register. Unfortunately, SUBREG of hard registers only
2631 deals in terms of words, so we have a problem converting input
2632 arguments to the CONCAT of two registers that is used elsewhere
2633 for complex values. If this is before reload, we can copy it into
2634 memory and reload. FIXME, we should see about using extract and
2635 insert on integer registers, but complex short and complex char
2636 variables should be rarely used. */
2637 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2638 && (reload_in_progress | reload_completed) == 0)
2639 {
2640 int packed_dest_p
2641 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2642 int packed_src_p
2643 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2644
2645 if (packed_dest_p || packed_src_p)
2646 {
2647 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2648 ? MODE_FLOAT : MODE_INT);
2649
2650 enum machine_mode reg_mode
2651 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2652
2653 if (reg_mode != BLKmode)
2654 {
2655 rtx mem = assign_stack_temp (reg_mode,
2656 GET_MODE_SIZE (mode), 0);
2657 rtx cmem = adjust_address (mem, mode, 0);
2658
2659 if (packed_dest_p)
2660 {
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2662
2663 emit_move_insn_1 (cmem, y);
2664 return emit_move_insn_1 (sreg, mem);
2665 }
2666 else
2667 {
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2669
2670 emit_move_insn_1 (mem, sreg);
2671 return emit_move_insn_1 (x, cmem);
2672 }
2673 }
2674 }
2675 }
2676
2677 realpart_x = gen_realpart (submode, x);
2678 realpart_y = gen_realpart (submode, y);
2679 imagpart_x = gen_imagpart (submode, x);
2680 imagpart_y = gen_imagpart (submode, y);
2681
2682 /* Show the output dies here. This is necessary for SUBREGs
2683 of pseudos since we cannot track their lifetimes correctly;
2684 hard regs shouldn't appear here except as return values.
2685 We never want to emit such a clobber after reload. */
2686 if (x != y
2687 && ! (reload_in_progress || reload_completed)
2688 && (GET_CODE (realpart_x) == SUBREG
2689 || GET_CODE (imagpart_x) == SUBREG))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2691
2692 emit_move_insn (realpart_x, realpart_y);
2693 emit_move_insn (imagpart_x, imagpart_y);
2694 }
2695
2696 return get_last_insn ();
2697 }
2698
2699 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2700 find a mode to do it in. If we have a movcc, use it. Otherwise,
2701 find the MODE_INT mode of the same width. */
2702 else if (GET_MODE_CLASS (mode) == MODE_CC
2703 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2704 {
2705 enum insn_code insn_code;
2706 enum machine_mode tmode = VOIDmode;
2707 rtx x1 = x, y1 = y;
2708
2709 if (mode != CCmode
2710 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2711 tmode = CCmode;
2712 else
2713 for (tmode = QImode; tmode != VOIDmode;
2714 tmode = GET_MODE_WIDER_MODE (tmode))
2715 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2716 break;
2717
2718 if (tmode == VOIDmode)
2719 abort ();
2720
2721 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2722 may call change_address which is not appropriate if we were
2723 called when a reload was in progress. We don't have to worry
2724 about changing the address since the size in bytes is supposed to
2725 be the same. Copy the MEM to change the mode and move any
2726 substitutions from the old MEM to the new one. */
2727
2728 if (reload_in_progress)
2729 {
2730 x = gen_lowpart_common (tmode, x1);
2731 if (x == 0 && MEM_P (x1))
2732 {
2733 x = adjust_address_nv (x1, tmode, 0);
2734 copy_replacements (x1, x);
2735 }
2736
2737 y = gen_lowpart_common (tmode, y1);
2738 if (y == 0 && MEM_P (y1))
2739 {
2740 y = adjust_address_nv (y1, tmode, 0);
2741 copy_replacements (y1, y);
2742 }
2743 }
2744 else
2745 {
2746 x = gen_lowpart (tmode, x);
2747 y = gen_lowpart (tmode, y);
2748 }
2749
2750 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2751 return emit_insn (GEN_FCN (insn_code) (x, y));
2752 }
2753
2754 /* Try using a move pattern for the corresponding integer mode. This is
2755 only safe when simplify_subreg can convert MODE constants into integer
2756 constants. At present, it can only do this reliably if the value
2757 fits within a HOST_WIDE_INT. */
2758 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2759 && (submode = int_mode_for_mode (mode)) != BLKmode
2760 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2761 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2762 (simplify_gen_subreg (submode, x, mode, 0),
2763 simplify_gen_subreg (submode, y, mode, 0)));
2764
2765 /* This will handle any multi-word or full-word mode that lacks a move_insn
2766 pattern. However, you will get better code if you define such patterns,
2767 even if they must turn into multiple assembler instructions. */
2768 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2769 {
2770 rtx last_insn = 0;
2771 rtx seq, inner;
2772 int need_clobber;
2773 int i;
2774
2775 #ifdef PUSH_ROUNDING
2776
2777 /* If X is a push on the stack, do the push now and replace
2778 X with a reference to the stack pointer. */
2779 if (push_operand (x, GET_MODE (x)))
2780 {
2781 rtx temp;
2782 enum rtx_code code;
2783
2784 /* Do not use anti_adjust_stack, since we don't want to update
2785 stack_pointer_delta. */
2786 temp = expand_binop (Pmode,
2787 #ifdef STACK_GROWS_DOWNWARD
2788 sub_optab,
2789 #else
2790 add_optab,
2791 #endif
2792 stack_pointer_rtx,
2793 GEN_INT
2794 (PUSH_ROUNDING
2795 (GET_MODE_SIZE (GET_MODE (x)))),
2796 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2797
2798 if (temp != stack_pointer_rtx)
2799 emit_move_insn (stack_pointer_rtx, temp);
2800
2801 code = GET_CODE (XEXP (x, 0));
2802
2803 /* Just hope that small offsets off SP are OK. */
2804 if (code == POST_INC)
2805 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2806 GEN_INT (-((HOST_WIDE_INT)
2807 GET_MODE_SIZE (GET_MODE (x)))));
2808 else if (code == POST_DEC)
2809 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2810 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2811 else
2812 temp = stack_pointer_rtx;
2813
2814 x = change_address (x, VOIDmode, temp);
2815 }
2816 #endif
2817
2818 /* If we are in reload, see if either operand is a MEM whose address
2819 is scheduled for replacement. */
2820 if (reload_in_progress && MEM_P (x)
2821 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2822 x = replace_equiv_address_nv (x, inner);
2823 if (reload_in_progress && MEM_P (y)
2824 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2825 y = replace_equiv_address_nv (y, inner);
2826
2827 start_sequence ();
2828
2829 need_clobber = 0;
2830 for (i = 0;
2831 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2832 i++)
2833 {
2834 rtx xpart = operand_subword (x, i, 1, mode);
2835 rtx ypart = operand_subword (y, i, 1, mode);
2836
2837 /* If we can't get a part of Y, put Y into memory if it is a
2838 constant. Otherwise, force it into a register. If we still
2839 can't get a part of Y, abort. */
2840 if (ypart == 0 && CONSTANT_P (y))
2841 {
2842 y = force_const_mem (mode, y);
2843 ypart = operand_subword (y, i, 1, mode);
2844 }
2845 else if (ypart == 0)
2846 ypart = operand_subword_force (y, i, mode);
2847
2848 if (xpart == 0 || ypart == 0)
2849 abort ();
2850
2851 need_clobber |= (GET_CODE (xpart) == SUBREG);
2852
2853 last_insn = emit_move_insn (xpart, ypart);
2854 }
2855
2856 seq = get_insns ();
2857 end_sequence ();
2858
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2863 if (x != y
2864 && ! (reload_in_progress || reload_completed)
2865 && need_clobber != 0)
2866 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2867
2868 emit_insn (seq);
2869
2870 return last_insn;
2871 }
2872 else
2873 abort ();
2874 }
2875
2876 /* If Y is representable exactly in a narrower mode, and the target can
2877 perform the extension directly from constant or memory, then emit the
2878 move as an extension. */
2879
2880 static rtx
2881 compress_float_constant (rtx x, rtx y)
2882 {
2883 enum machine_mode dstmode = GET_MODE (x);
2884 enum machine_mode orig_srcmode = GET_MODE (y);
2885 enum machine_mode srcmode;
2886 REAL_VALUE_TYPE r;
2887
2888 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2889
2890 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2891 srcmode != orig_srcmode;
2892 srcmode = GET_MODE_WIDER_MODE (srcmode))
2893 {
2894 enum insn_code ic;
2895 rtx trunc_y, last_insn;
2896
2897 /* Skip if the target can't extend this way. */
2898 ic = can_extend_p (dstmode, srcmode, 0);
2899 if (ic == CODE_FOR_nothing)
2900 continue;
2901
2902 /* Skip if the narrowed value isn't exact. */
2903 if (! exact_real_truncate (srcmode, &r))
2904 continue;
2905
2906 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2907
2908 if (LEGITIMATE_CONSTANT_P (trunc_y))
2909 {
2910 /* Skip if the target needs extra instructions to perform
2911 the extension. */
2912 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2913 continue;
2914 }
2915 else if (float_extend_from_mem[dstmode][srcmode])
2916 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2917 else
2918 continue;
2919
2920 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2921 last_insn = get_last_insn ();
2922
2923 if (REG_P (x))
2924 set_unique_reg_note (last_insn, REG_EQUAL, y);
2925
2926 return last_insn;
2927 }
2928
2929 return NULL_RTX;
2930 }
2931 \f
2932 /* Pushing data onto the stack. */
2933
2934 /* Push a block of length SIZE (perhaps variable)
2935 and return an rtx to address the beginning of the block.
2936 The value may be virtual_outgoing_args_rtx.
2937
2938 EXTRA is the number of bytes of padding to push in addition to SIZE.
2939 BELOW nonzero means this padding comes at low addresses;
2940 otherwise, the padding comes at high addresses. */
2941
2942 rtx
2943 push_block (rtx size, int extra, int below)
2944 {
2945 rtx temp;
2946
2947 size = convert_modes (Pmode, ptr_mode, size, 1);
2948 if (CONSTANT_P (size))
2949 anti_adjust_stack (plus_constant (size, extra));
2950 else if (REG_P (size) && extra == 0)
2951 anti_adjust_stack (size);
2952 else
2953 {
2954 temp = copy_to_mode_reg (Pmode, size);
2955 if (extra != 0)
2956 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2957 temp, 0, OPTAB_LIB_WIDEN);
2958 anti_adjust_stack (temp);
2959 }
2960
2961 #ifndef STACK_GROWS_DOWNWARD
2962 if (0)
2963 #else
2964 if (1)
2965 #endif
2966 {
2967 temp = virtual_outgoing_args_rtx;
2968 if (extra != 0 && below)
2969 temp = plus_constant (temp, extra);
2970 }
2971 else
2972 {
2973 if (GET_CODE (size) == CONST_INT)
2974 temp = plus_constant (virtual_outgoing_args_rtx,
2975 -INTVAL (size) - (below ? 0 : extra));
2976 else if (extra != 0 && !below)
2977 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2978 negate_rtx (Pmode, plus_constant (size, extra)));
2979 else
2980 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2981 negate_rtx (Pmode, size));
2982 }
2983
2984 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2985 }
2986
2987 #ifdef PUSH_ROUNDING
2988
2989 /* Emit single push insn. */
2990
2991 static void
2992 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2993 {
2994 rtx dest_addr;
2995 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2996 rtx dest;
2997 enum insn_code icode;
2998 insn_operand_predicate_fn pred;
2999
3000 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3001 /* If there is push pattern, use it. Otherwise try old way of throwing
3002 MEM representing push operation to move expander. */
3003 icode = push_optab->handlers[(int) mode].insn_code;
3004 if (icode != CODE_FOR_nothing)
3005 {
3006 if (((pred = insn_data[(int) icode].operand[0].predicate)
3007 && !((*pred) (x, mode))))
3008 x = force_reg (mode, x);
3009 emit_insn (GEN_FCN (icode) (x));
3010 return;
3011 }
3012 if (GET_MODE_SIZE (mode) == rounded_size)
3013 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3014 /* If we are to pad downward, adjust the stack pointer first and
3015 then store X into the stack location using an offset. This is
3016 because emit_move_insn does not know how to pad; it does not have
3017 access to type. */
3018 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3019 {
3020 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3021 HOST_WIDE_INT offset;
3022
3023 emit_move_insn (stack_pointer_rtx,
3024 expand_binop (Pmode,
3025 #ifdef STACK_GROWS_DOWNWARD
3026 sub_optab,
3027 #else
3028 add_optab,
3029 #endif
3030 stack_pointer_rtx,
3031 GEN_INT (rounded_size),
3032 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3033
3034 offset = (HOST_WIDE_INT) padding_size;
3035 #ifdef STACK_GROWS_DOWNWARD
3036 if (STACK_PUSH_CODE == POST_DEC)
3037 /* We have already decremented the stack pointer, so get the
3038 previous value. */
3039 offset += (HOST_WIDE_INT) rounded_size;
3040 #else
3041 if (STACK_PUSH_CODE == POST_INC)
3042 /* We have already incremented the stack pointer, so get the
3043 previous value. */
3044 offset -= (HOST_WIDE_INT) rounded_size;
3045 #endif
3046 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3047 }
3048 else
3049 {
3050 #ifdef STACK_GROWS_DOWNWARD
3051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3053 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3054 #else
3055 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3056 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3057 GEN_INT (rounded_size));
3058 #endif
3059 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3060 }
3061
3062 dest = gen_rtx_MEM (mode, dest_addr);
3063
3064 if (type != 0)
3065 {
3066 set_mem_attributes (dest, type, 1);
3067
3068 if (flag_optimize_sibling_calls)
3069 /* Function incoming arguments may overlap with sibling call
3070 outgoing arguments and we cannot allow reordering of reads
3071 from function arguments with stores to outgoing arguments
3072 of sibling calls. */
3073 set_mem_alias_set (dest, 0);
3074 }
3075 emit_move_insn (dest, x);
3076 }
3077 #endif
3078
3079 /* Generate code to push X onto the stack, assuming it has mode MODE and
3080 type TYPE.
3081 MODE is redundant except when X is a CONST_INT (since they don't
3082 carry mode info).
3083 SIZE is an rtx for the size of data to be copied (in bytes),
3084 needed only if X is BLKmode.
3085
3086 ALIGN (in bits) is maximum alignment we can assume.
3087
3088 If PARTIAL and REG are both nonzero, then copy that many of the first
3089 words of X into registers starting with REG, and push the rest of X.
3090 The amount of space pushed is decreased by PARTIAL words,
3091 rounded *down* to a multiple of PARM_BOUNDARY.
3092 REG must be a hard register in this case.
3093 If REG is zero but PARTIAL is not, take any all others actions for an
3094 argument partially in registers, but do not actually load any
3095 registers.
3096
3097 EXTRA is the amount in bytes of extra space to leave next to this arg.
3098 This is ignored if an argument block has already been allocated.
3099
3100 On a machine that lacks real push insns, ARGS_ADDR is the address of
3101 the bottom of the argument block for this call. We use indexing off there
3102 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3103 argument block has not been preallocated.
3104
3105 ARGS_SO_FAR is the size of args previously pushed for this call.
3106
3107 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3108 for arguments passed in registers. If nonzero, it will be the number
3109 of bytes required. */
3110
3111 void
3112 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3113 unsigned int align, int partial, rtx reg, int extra,
3114 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3115 rtx alignment_pad)
3116 {
3117 rtx xinner;
3118 enum direction stack_direction
3119 #ifdef STACK_GROWS_DOWNWARD
3120 = downward;
3121 #else
3122 = upward;
3123 #endif
3124
3125 /* Decide where to pad the argument: `downward' for below,
3126 `upward' for above, or `none' for don't pad it.
3127 Default is below for small data on big-endian machines; else above. */
3128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3129
3130 /* Invert direction if stack is post-decrement.
3131 FIXME: why? */
3132 if (STACK_PUSH_CODE == POST_DEC)
3133 if (where_pad != none)
3134 where_pad = (where_pad == downward ? upward : downward);
3135
3136 xinner = x;
3137
3138 if (mode == BLKmode)
3139 {
3140 /* Copy a block into the stack, entirely or partially. */
3141
3142 rtx temp;
3143 int used = partial * UNITS_PER_WORD;
3144 int offset;
3145 int skip;
3146
3147 if (reg && GET_CODE (reg) == PARALLEL)
3148 {
3149 /* Use the size of the elt to compute offset. */
3150 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3151 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3152 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3153 }
3154 else
3155 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3156
3157 if (size == 0)
3158 abort ();
3159
3160 used -= offset;
3161
3162 /* USED is now the # of bytes we need not copy to the stack
3163 because registers will take care of them. */
3164
3165 if (partial != 0)
3166 xinner = adjust_address (xinner, BLKmode, used);
3167
3168 /* If the partial register-part of the arg counts in its stack size,
3169 skip the part of stack space corresponding to the registers.
3170 Otherwise, start copying to the beginning of the stack space,
3171 by setting SKIP to 0. */
3172 skip = (reg_parm_stack_space == 0) ? 0 : used;
3173
3174 #ifdef PUSH_ROUNDING
3175 /* Do it with several push insns if that doesn't take lots of insns
3176 and if there is no difficulty with push insns that skip bytes
3177 on the stack for alignment purposes. */
3178 if (args_addr == 0
3179 && PUSH_ARGS
3180 && GET_CODE (size) == CONST_INT
3181 && skip == 0
3182 && MEM_ALIGN (xinner) >= align
3183 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3184 /* Here we avoid the case of a structure whose weak alignment
3185 forces many pushes of a small amount of data,
3186 and such small pushes do rounding that causes trouble. */
3187 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3188 || align >= BIGGEST_ALIGNMENT
3189 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3190 == (align / BITS_PER_UNIT)))
3191 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3192 {
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra && args_addr == 0
3197 && where_pad != none && where_pad != stack_direction)
3198 anti_adjust_stack (GEN_INT (extra));
3199
3200 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3201 }
3202 else
3203 #endif /* PUSH_ROUNDING */
3204 {
3205 rtx target;
3206
3207 /* Otherwise make space on the stack and copy the data
3208 to the address of that space. */
3209
3210 /* Deduct words put into registers from the size we must copy. */
3211 if (partial != 0)
3212 {
3213 if (GET_CODE (size) == CONST_INT)
3214 size = GEN_INT (INTVAL (size) - used);
3215 else
3216 size = expand_binop (GET_MODE (size), sub_optab, size,
3217 GEN_INT (used), NULL_RTX, 0,
3218 OPTAB_LIB_WIDEN);
3219 }
3220
3221 /* Get the address of the stack space.
3222 In this case, we do not deal with EXTRA separately.
3223 A single stack adjust will do. */
3224 if (! args_addr)
3225 {
3226 temp = push_block (size, extra, where_pad == downward);
3227 extra = 0;
3228 }
3229 else if (GET_CODE (args_so_far) == CONST_INT)
3230 temp = memory_address (BLKmode,
3231 plus_constant (args_addr,
3232 skip + INTVAL (args_so_far)));
3233 else
3234 temp = memory_address (BLKmode,
3235 plus_constant (gen_rtx_PLUS (Pmode,
3236 args_addr,
3237 args_so_far),
3238 skip));
3239
3240 if (!ACCUMULATE_OUTGOING_ARGS)
3241 {
3242 /* If the source is referenced relative to the stack pointer,
3243 copy it to another register to stabilize it. We do not need
3244 to do this if we know that we won't be changing sp. */
3245
3246 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3247 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3248 temp = copy_to_reg (temp);
3249 }
3250
3251 target = gen_rtx_MEM (BLKmode, temp);
3252
3253 /* We do *not* set_mem_attributes here, because incoming arguments
3254 may overlap with sibling call outgoing arguments and we cannot
3255 allow reordering of reads from function arguments with stores
3256 to outgoing arguments of sibling calls. We do, however, want
3257 to record the alignment of the stack slot. */
3258 /* ALIGN may well be better aligned than TYPE, e.g. due to
3259 PARM_BOUNDARY. Assume the caller isn't lying. */
3260 set_mem_align (target, align);
3261
3262 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3263 }
3264 }
3265 else if (partial > 0)
3266 {
3267 /* Scalar partly in registers. */
3268
3269 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3270 int i;
3271 int not_stack;
3272 /* # words of start of argument
3273 that we must make space for but need not store. */
3274 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3275 int args_offset = INTVAL (args_so_far);
3276 int skip;
3277
3278 /* Push padding now if padding above and stack grows down,
3279 or if padding below and stack grows up.
3280 But if space already allocated, this has already been done. */
3281 if (extra && args_addr == 0
3282 && where_pad != none && where_pad != stack_direction)
3283 anti_adjust_stack (GEN_INT (extra));
3284
3285 /* If we make space by pushing it, we might as well push
3286 the real data. Otherwise, we can leave OFFSET nonzero
3287 and leave the space uninitialized. */
3288 if (args_addr == 0)
3289 offset = 0;
3290
3291 /* Now NOT_STACK gets the number of words that we don't need to
3292 allocate on the stack. */
3293 not_stack = partial - offset;
3294
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
3299 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3300
3301 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3302 x = validize_mem (force_const_mem (mode, x));
3303
3304 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3305 SUBREGs of such registers are not allowed. */
3306 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3307 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3308 x = copy_to_reg (x);
3309
3310 /* Loop over all the words allocated on the stack for this arg. */
3311 /* We can do it by words, because any scalar bigger than a word
3312 has a size a multiple of a word. */
3313 #ifndef PUSH_ARGS_REVERSED
3314 for (i = not_stack; i < size; i++)
3315 #else
3316 for (i = size - 1; i >= not_stack; i--)
3317 #endif
3318 if (i >= not_stack + offset)
3319 emit_push_insn (operand_subword_force (x, i, mode),
3320 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3321 0, args_addr,
3322 GEN_INT (args_offset + ((i - not_stack + skip)
3323 * UNITS_PER_WORD)),
3324 reg_parm_stack_space, alignment_pad);
3325 }
3326 else
3327 {
3328 rtx addr;
3329 rtx dest;
3330
3331 /* Push padding now if padding above and stack grows down,
3332 or if padding below and stack grows up.
3333 But if space already allocated, this has already been done. */
3334 if (extra && args_addr == 0
3335 && where_pad != none && where_pad != stack_direction)
3336 anti_adjust_stack (GEN_INT (extra));
3337
3338 #ifdef PUSH_ROUNDING
3339 if (args_addr == 0 && PUSH_ARGS)
3340 emit_single_push_insn (mode, x, type);
3341 else
3342 #endif
3343 {
3344 if (GET_CODE (args_so_far) == CONST_INT)
3345 addr
3346 = memory_address (mode,
3347 plus_constant (args_addr,
3348 INTVAL (args_so_far)));
3349 else
3350 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3351 args_so_far));
3352 dest = gen_rtx_MEM (mode, addr);
3353
3354 /* We do *not* set_mem_attributes here, because incoming arguments
3355 may overlap with sibling call outgoing arguments and we cannot
3356 allow reordering of reads from function arguments with stores
3357 to outgoing arguments of sibling calls. We do, however, want
3358 to record the alignment of the stack slot. */
3359 /* ALIGN may well be better aligned than TYPE, e.g. due to
3360 PARM_BOUNDARY. Assume the caller isn't lying. */
3361 set_mem_align (dest, align);
3362
3363 emit_move_insn (dest, x);
3364 }
3365 }
3366
3367 /* If part should go in registers, copy that part
3368 into the appropriate registers. Do this now, at the end,
3369 since mem-to-mem copies above may do function calls. */
3370 if (partial > 0 && reg != 0)
3371 {
3372 /* Handle calls that pass values in multiple non-contiguous locations.
3373 The Irix 6 ABI has examples of this. */
3374 if (GET_CODE (reg) == PARALLEL)
3375 emit_group_load (reg, x, type, -1);
3376 else
3377 move_block_to_reg (REGNO (reg), x, partial, mode);
3378 }
3379
3380 if (extra && args_addr == 0 && where_pad == stack_direction)
3381 anti_adjust_stack (GEN_INT (extra));
3382
3383 if (alignment_pad && args_addr == 0)
3384 anti_adjust_stack (alignment_pad);
3385 }
3386 \f
3387 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3388 operations. */
3389
3390 static rtx
3391 get_subtarget (rtx x)
3392 {
3393 return ((x == 0
3394 /* Only registers can be subtargets. */
3395 || !REG_P (x)
3396 /* If the register is readonly, it can't be set more than once. */
3397 || RTX_UNCHANGING_P (x)
3398 /* Don't use hard regs to avoid extending their life. */
3399 || REGNO (x) < FIRST_PSEUDO_REGISTER
3400 /* Avoid subtargets inside loops,
3401 since they hide some invariant expressions. */
3402 || preserve_subexpressions_p ())
3403 ? 0 : x);
3404 }
3405
3406 /* Expand an assignment that stores the value of FROM into TO.
3407 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3408 (If the value is constant, this rtx is a constant.)
3409 Otherwise, the returned value is NULL_RTX. */
3410
3411 rtx
3412 expand_assignment (tree to, tree from, int want_value)
3413 {
3414 rtx to_rtx = 0;
3415 rtx result;
3416
3417 /* Don't crash if the lhs of the assignment was erroneous. */
3418
3419 if (TREE_CODE (to) == ERROR_MARK)
3420 {
3421 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3422 return want_value ? result : NULL_RTX;
3423 }
3424
3425 /* Assignment of a structure component needs special treatment
3426 if the structure component's rtx is not simply a MEM.
3427 Assignment of an array element at a constant index, and assignment of
3428 an array element in an unaligned packed structure field, has the same
3429 problem. */
3430
3431 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3432 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3433 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3434 {
3435 enum machine_mode mode1;
3436 HOST_WIDE_INT bitsize, bitpos;
3437 rtx orig_to_rtx;
3438 tree offset;
3439 int unsignedp;
3440 int volatilep = 0;
3441 tree tem;
3442
3443 push_temp_slots ();
3444 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3445 &unsignedp, &volatilep);
3446
3447 /* If we are going to use store_bit_field and extract_bit_field,
3448 make sure to_rtx will be safe for multiple use. */
3449
3450 if (mode1 == VOIDmode && want_value)
3451 tem = stabilize_reference (tem);
3452
3453 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3454
3455 if (offset != 0)
3456 {
3457 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3458
3459 if (!MEM_P (to_rtx))
3460 abort ();
3461
3462 #ifdef POINTERS_EXTEND_UNSIGNED
3463 if (GET_MODE (offset_rtx) != Pmode)
3464 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3465 #else
3466 if (GET_MODE (offset_rtx) != ptr_mode)
3467 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3468 #endif
3469
3470 /* A constant address in TO_RTX can have VOIDmode, we must not try
3471 to call force_reg for that case. Avoid that case. */
3472 if (MEM_P (to_rtx)
3473 && GET_MODE (to_rtx) == BLKmode
3474 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3475 && bitsize > 0
3476 && (bitpos % bitsize) == 0
3477 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3478 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3479 {
3480 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3481 bitpos = 0;
3482 }
3483
3484 to_rtx = offset_address (to_rtx, offset_rtx,
3485 highest_pow2_factor_for_target (to,
3486 offset));
3487 }
3488
3489 if (MEM_P (to_rtx))
3490 {
3491 /* If the field is at offset zero, we could have been given the
3492 DECL_RTX of the parent struct. Don't munge it. */
3493 to_rtx = shallow_copy_rtx (to_rtx);
3494
3495 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3496 }
3497
3498 /* Deal with volatile and readonly fields. The former is only done
3499 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3500 if (volatilep && MEM_P (to_rtx))
3501 {
3502 if (to_rtx == orig_to_rtx)
3503 to_rtx = copy_rtx (to_rtx);
3504 MEM_VOLATILE_P (to_rtx) = 1;
3505 }
3506
3507 if (TREE_CODE (to) == COMPONENT_REF
3508 && TREE_READONLY (TREE_OPERAND (to, 1))
3509 /* We can't assert that a MEM won't be set more than once
3510 if the component is not addressable because another
3511 non-addressable component may be referenced by the same MEM. */
3512 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3513 {
3514 if (to_rtx == orig_to_rtx)
3515 to_rtx = copy_rtx (to_rtx);
3516 RTX_UNCHANGING_P (to_rtx) = 1;
3517 }
3518
3519 if (MEM_P (to_rtx) && ! can_address_p (to))
3520 {
3521 if (to_rtx == orig_to_rtx)
3522 to_rtx = copy_rtx (to_rtx);
3523 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3524 }
3525
3526 /* Optimize bitfld op= val in certain cases. */
3527 while (mode1 == VOIDmode && !want_value
3528 && bitsize > 0 && bitsize < BITS_PER_WORD
3529 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3530 && !TREE_SIDE_EFFECTS (to)
3531 && !TREE_THIS_VOLATILE (to))
3532 {
3533 tree src, op0, op1;
3534 rtx value, str_rtx = to_rtx;
3535 HOST_WIDE_INT bitpos1 = bitpos;
3536 optab binop;
3537
3538 src = from;
3539 STRIP_NOPS (src);
3540 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3541 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3542 break;
3543
3544 op0 = TREE_OPERAND (src, 0);
3545 op1 = TREE_OPERAND (src, 1);
3546 STRIP_NOPS (op0);
3547
3548 if (! operand_equal_p (to, op0, 0))
3549 break;
3550
3551 if (MEM_P (str_rtx))
3552 {
3553 enum machine_mode mode = GET_MODE (str_rtx);
3554 HOST_WIDE_INT offset1;
3555
3556 if (GET_MODE_BITSIZE (mode) == 0
3557 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3558 mode = word_mode;
3559 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3560 mode, 0);
3561 if (mode == VOIDmode)
3562 break;
3563
3564 offset1 = bitpos1;
3565 bitpos1 %= GET_MODE_BITSIZE (mode);
3566 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3567 str_rtx = adjust_address (str_rtx, mode, offset1);
3568 }
3569 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3570 break;
3571
3572 /* If the bit field covers the whole REG/MEM, store_field
3573 will likely generate better code. */
3574 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3575 break;
3576
3577 /* We can't handle fields split accross multiple entities. */
3578 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3579 break;
3580
3581 if (BYTES_BIG_ENDIAN)
3582 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3583 - bitsize;
3584
3585 /* Special case some bitfield op= exp. */
3586 switch (TREE_CODE (src))
3587 {
3588 case PLUS_EXPR:
3589 case MINUS_EXPR:
3590 /* For now, just optimize the case of the topmost bitfield
3591 where we don't need to do any masking and also
3592 1 bit bitfields where xor can be used.
3593 We might win by one instruction for the other bitfields
3594 too if insv/extv instructions aren't used, so that
3595 can be added later. */
3596 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3597 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3598 break;
3599 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3600 value = convert_modes (GET_MODE (str_rtx),
3601 TYPE_MODE (TREE_TYPE (op1)), value,
3602 TYPE_UNSIGNED (TREE_TYPE (op1)));
3603
3604 /* We may be accessing data outside the field, which means
3605 we can alias adjacent data. */
3606 if (MEM_P (str_rtx))
3607 {
3608 str_rtx = shallow_copy_rtx (str_rtx);
3609 set_mem_alias_set (str_rtx, 0);
3610 set_mem_expr (str_rtx, 0);
3611 }
3612
3613 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3614 if (bitsize == 1
3615 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3616 {
3617 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3618 NULL_RTX);
3619 binop = xor_optab;
3620 }
3621 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3622 value, build_int_2 (bitpos1, 0),
3623 NULL_RTX, 1);
3624 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3625 value, str_rtx, 1, OPTAB_WIDEN);
3626 if (result != str_rtx)
3627 emit_move_insn (str_rtx, result);
3628 free_temp_slots ();
3629 pop_temp_slots ();
3630 return NULL_RTX;
3631
3632 default:
3633 break;
3634 }
3635
3636 break;
3637 }
3638
3639 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3640 (want_value
3641 /* Spurious cast for HPUX compiler. */
3642 ? ((enum machine_mode)
3643 TYPE_MODE (TREE_TYPE (to)))
3644 : VOIDmode),
3645 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3646
3647 preserve_temp_slots (result);
3648 free_temp_slots ();
3649 pop_temp_slots ();
3650
3651 /* If the value is meaningful, convert RESULT to the proper mode.
3652 Otherwise, return nothing. */
3653 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3654 TYPE_MODE (TREE_TYPE (from)),
3655 result,
3656 TYPE_UNSIGNED (TREE_TYPE (to)))
3657 : NULL_RTX);
3658 }
3659
3660 /* If the rhs is a function call and its value is not an aggregate,
3661 call the function before we start to compute the lhs.
3662 This is needed for correct code for cases such as
3663 val = setjmp (buf) on machines where reference to val
3664 requires loading up part of an address in a separate insn.
3665
3666 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3667 since it might be a promoted variable where the zero- or sign- extension
3668 needs to be done. Handling this in the normal way is safe because no
3669 computation is done before the call. */
3670 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3671 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3672 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3673 && REG_P (DECL_RTL (to))))
3674 {
3675 rtx value;
3676
3677 push_temp_slots ();
3678 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3679 if (to_rtx == 0)
3680 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3681
3682 /* Handle calls that return values in multiple non-contiguous locations.
3683 The Irix 6 ABI has examples of this. */
3684 if (GET_CODE (to_rtx) == PARALLEL)
3685 emit_group_load (to_rtx, value, TREE_TYPE (from),
3686 int_size_in_bytes (TREE_TYPE (from)));
3687 else if (GET_MODE (to_rtx) == BLKmode)
3688 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3689 else
3690 {
3691 if (POINTER_TYPE_P (TREE_TYPE (to)))
3692 value = convert_memory_address (GET_MODE (to_rtx), value);
3693 emit_move_insn (to_rtx, value);
3694 }
3695 preserve_temp_slots (to_rtx);
3696 free_temp_slots ();
3697 pop_temp_slots ();
3698 return want_value ? to_rtx : NULL_RTX;
3699 }
3700
3701 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3702 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3703
3704 if (to_rtx == 0)
3705 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3706
3707 /* Don't move directly into a return register. */
3708 if (TREE_CODE (to) == RESULT_DECL
3709 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3710 {
3711 rtx temp;
3712
3713 push_temp_slots ();
3714 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3715
3716 if (GET_CODE (to_rtx) == PARALLEL)
3717 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3718 int_size_in_bytes (TREE_TYPE (from)));
3719 else
3720 emit_move_insn (to_rtx, temp);
3721
3722 preserve_temp_slots (to_rtx);
3723 free_temp_slots ();
3724 pop_temp_slots ();
3725 return want_value ? to_rtx : NULL_RTX;
3726 }
3727
3728 /* In case we are returning the contents of an object which overlaps
3729 the place the value is being stored, use a safe function when copying
3730 a value through a pointer into a structure value return block. */
3731 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3732 && current_function_returns_struct
3733 && !current_function_returns_pcc_struct)
3734 {
3735 rtx from_rtx, size;
3736
3737 push_temp_slots ();
3738 size = expr_size (from);
3739 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3740
3741 emit_library_call (memmove_libfunc, LCT_NORMAL,
3742 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3743 XEXP (from_rtx, 0), Pmode,
3744 convert_to_mode (TYPE_MODE (sizetype),
3745 size, TYPE_UNSIGNED (sizetype)),
3746 TYPE_MODE (sizetype));
3747
3748 preserve_temp_slots (to_rtx);
3749 free_temp_slots ();
3750 pop_temp_slots ();
3751 return want_value ? to_rtx : NULL_RTX;
3752 }
3753
3754 /* Compute FROM and store the value in the rtx we got. */
3755
3756 push_temp_slots ();
3757 result = store_expr (from, to_rtx, want_value);
3758 preserve_temp_slots (result);
3759 free_temp_slots ();
3760 pop_temp_slots ();
3761 return want_value ? result : NULL_RTX;
3762 }
3763
3764 /* Generate code for computing expression EXP,
3765 and storing the value into TARGET.
3766
3767 If WANT_VALUE & 1 is nonzero, return a copy of the value
3768 not in TARGET, so that we can be sure to use the proper
3769 value in a containing expression even if TARGET has something
3770 else stored in it. If possible, we copy the value through a pseudo
3771 and return that pseudo. Or, if the value is constant, we try to
3772 return the constant. In some cases, we return a pseudo
3773 copied *from* TARGET.
3774
3775 If the mode is BLKmode then we may return TARGET itself.
3776 It turns out that in BLKmode it doesn't cause a problem.
3777 because C has no operators that could combine two different
3778 assignments into the same BLKmode object with different values
3779 with no sequence point. Will other languages need this to
3780 be more thorough?
3781
3782 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3783 to catch quickly any cases where the caller uses the value
3784 and fails to set WANT_VALUE.
3785
3786 If WANT_VALUE & 2 is set, this is a store into a call param on the
3787 stack, and block moves may need to be treated specially. */
3788
3789 rtx
3790 store_expr (tree exp, rtx target, int want_value)
3791 {
3792 rtx temp;
3793 rtx alt_rtl = NULL_RTX;
3794 int dont_return_target = 0;
3795 int dont_store_target = 0;
3796
3797 if (VOID_TYPE_P (TREE_TYPE (exp)))
3798 {
3799 /* C++ can generate ?: expressions with a throw expression in one
3800 branch and an rvalue in the other. Here, we resolve attempts to
3801 store the throw expression's nonexistent result. */
3802 if (want_value)
3803 abort ();
3804 expand_expr (exp, const0_rtx, VOIDmode, 0);
3805 return NULL_RTX;
3806 }
3807 if (TREE_CODE (exp) == COMPOUND_EXPR)
3808 {
3809 /* Perform first part of compound expression, then assign from second
3810 part. */
3811 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3812 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3813 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3814 }
3815 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3816 {
3817 /* For conditional expression, get safe form of the target. Then
3818 test the condition, doing the appropriate assignment on either
3819 side. This avoids the creation of unnecessary temporaries.
3820 For non-BLKmode, it is more efficient not to do this. */
3821
3822 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3823
3824 do_pending_stack_adjust ();
3825 NO_DEFER_POP;
3826 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3827 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3828 emit_jump_insn (gen_jump (lab2));
3829 emit_barrier ();
3830 emit_label (lab1);
3831 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3832 emit_label (lab2);
3833 OK_DEFER_POP;
3834
3835 return want_value & 1 ? target : NULL_RTX;
3836 }
3837 else if ((want_value & 1) != 0
3838 && MEM_P (target)
3839 && ! MEM_VOLATILE_P (target)
3840 && GET_MODE (target) != BLKmode)
3841 /* If target is in memory and caller wants value in a register instead,
3842 arrange that. Pass TARGET as target for expand_expr so that,
3843 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3844 We know expand_expr will not use the target in that case.
3845 Don't do this if TARGET is volatile because we are supposed
3846 to write it and then read it. */
3847 {
3848 temp = expand_expr (exp, target, GET_MODE (target),
3849 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3850 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3851 {
3852 /* If TEMP is already in the desired TARGET, only copy it from
3853 memory and don't store it there again. */
3854 if (temp == target
3855 || (rtx_equal_p (temp, target)
3856 && ! side_effects_p (temp) && ! side_effects_p (target)))
3857 dont_store_target = 1;
3858 temp = copy_to_reg (temp);
3859 }
3860 dont_return_target = 1;
3861 }
3862 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3863 /* If this is a scalar in a register that is stored in a wider mode
3864 than the declared mode, compute the result into its declared mode
3865 and then convert to the wider mode. Our value is the computed
3866 expression. */
3867 {
3868 rtx inner_target = 0;
3869
3870 /* If we don't want a value, we can do the conversion inside EXP,
3871 which will often result in some optimizations. Do the conversion
3872 in two steps: first change the signedness, if needed, then
3873 the extend. But don't do this if the type of EXP is a subtype
3874 of something else since then the conversion might involve
3875 more than just converting modes. */
3876 if ((want_value & 1) == 0
3877 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3878 && TREE_TYPE (TREE_TYPE (exp)) == 0
3879 && (!lang_hooks.reduce_bit_field_operations
3880 || (GET_MODE_PRECISION (GET_MODE (target))
3881 == TYPE_PRECISION (TREE_TYPE (exp)))))
3882 {
3883 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3884 != SUBREG_PROMOTED_UNSIGNED_P (target))
3885 exp = convert
3886 (lang_hooks.types.signed_or_unsigned_type
3887 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3888
3889 exp = convert (lang_hooks.types.type_for_mode
3890 (GET_MODE (SUBREG_REG (target)),
3891 SUBREG_PROMOTED_UNSIGNED_P (target)),
3892 exp);
3893
3894 inner_target = SUBREG_REG (target);
3895 }
3896
3897 temp = expand_expr (exp, inner_target, VOIDmode,
3898 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3899
3900 /* If TEMP is a MEM and we want a result value, make the access
3901 now so it gets done only once. Strictly speaking, this is
3902 only necessary if the MEM is volatile, or if the address
3903 overlaps TARGET. But not performing the load twice also
3904 reduces the amount of rtl we generate and then have to CSE. */
3905 if (MEM_P (temp) && (want_value & 1) != 0)
3906 temp = copy_to_reg (temp);
3907
3908 /* If TEMP is a VOIDmode constant, use convert_modes to make
3909 sure that we properly convert it. */
3910 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3911 {
3912 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3913 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3914 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3915 GET_MODE (target), temp,
3916 SUBREG_PROMOTED_UNSIGNED_P (target));
3917 }
3918
3919 convert_move (SUBREG_REG (target), temp,
3920 SUBREG_PROMOTED_UNSIGNED_P (target));
3921
3922 /* If we promoted a constant, change the mode back down to match
3923 target. Otherwise, the caller might get confused by a result whose
3924 mode is larger than expected. */
3925
3926 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3927 {
3928 if (GET_MODE (temp) != VOIDmode)
3929 {
3930 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3931 SUBREG_PROMOTED_VAR_P (temp) = 1;
3932 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3933 SUBREG_PROMOTED_UNSIGNED_P (target));
3934 }
3935 else
3936 temp = convert_modes (GET_MODE (target),
3937 GET_MODE (SUBREG_REG (target)),
3938 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3939 }
3940
3941 return want_value & 1 ? temp : NULL_RTX;
3942 }
3943 else
3944 {
3945 temp = expand_expr_real (exp, target, GET_MODE (target),
3946 (want_value & 2
3947 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3948 &alt_rtl);
3949 /* Return TARGET if it's a specified hardware register.
3950 If TARGET is a volatile mem ref, either return TARGET
3951 or return a reg copied *from* TARGET; ANSI requires this.
3952
3953 Otherwise, if TEMP is not TARGET, return TEMP
3954 if it is constant (for efficiency),
3955 or if we really want the correct value. */
3956 if (!(target && REG_P (target)
3957 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3958 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3959 && ! rtx_equal_p (temp, target)
3960 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3961 dont_return_target = 1;
3962 }
3963
3964 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3965 the same as that of TARGET, adjust the constant. This is needed, for
3966 example, in case it is a CONST_DOUBLE and we want only a word-sized
3967 value. */
3968 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3969 && TREE_CODE (exp) != ERROR_MARK
3970 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3971 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3972 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3973
3974 /* If value was not generated in the target, store it there.
3975 Convert the value to TARGET's type first if necessary and emit the
3976 pending incrementations that have been queued when expanding EXP.
3977 Note that we cannot emit the whole queue blindly because this will
3978 effectively disable the POST_INC optimization later.
3979
3980 If TEMP and TARGET compare equal according to rtx_equal_p, but
3981 one or both of them are volatile memory refs, we have to distinguish
3982 two cases:
3983 - expand_expr has used TARGET. In this case, we must not generate
3984 another copy. This can be detected by TARGET being equal according
3985 to == .
3986 - expand_expr has not used TARGET - that means that the source just
3987 happens to have the same RTX form. Since temp will have been created
3988 by expand_expr, it will compare unequal according to == .
3989 We must generate a copy in this case, to reach the correct number
3990 of volatile memory references. */
3991
3992 if ((! rtx_equal_p (temp, target)
3993 || (temp != target && (side_effects_p (temp)
3994 || side_effects_p (target))))
3995 && TREE_CODE (exp) != ERROR_MARK
3996 && ! dont_store_target
3997 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3998 but TARGET is not valid memory reference, TEMP will differ
3999 from TARGET although it is really the same location. */
4000 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4001 /* If there's nothing to copy, don't bother. Don't call expr_size
4002 unless necessary, because some front-ends (C++) expr_size-hook
4003 aborts on objects that are not supposed to be bit-copied or
4004 bit-initialized. */
4005 && expr_size (exp) != const0_rtx)
4006 {
4007 if (GET_MODE (temp) != GET_MODE (target)
4008 && GET_MODE (temp) != VOIDmode)
4009 {
4010 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4011 if (dont_return_target)
4012 {
4013 /* In this case, we will return TEMP,
4014 so make sure it has the proper mode.
4015 But don't forget to store the value into TARGET. */
4016 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4017 emit_move_insn (target, temp);
4018 }
4019 else
4020 convert_move (target, temp, unsignedp);
4021 }
4022
4023 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4024 {
4025 /* Handle copying a string constant into an array. The string
4026 constant may be shorter than the array. So copy just the string's
4027 actual length, and clear the rest. First get the size of the data
4028 type of the string, which is actually the size of the target. */
4029 rtx size = expr_size (exp);
4030
4031 if (GET_CODE (size) == CONST_INT
4032 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4033 emit_block_move (target, temp, size,
4034 (want_value & 2
4035 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4036 else
4037 {
4038 /* Compute the size of the data to copy from the string. */
4039 tree copy_size
4040 = size_binop (MIN_EXPR,
4041 make_tree (sizetype, size),
4042 size_int (TREE_STRING_LENGTH (exp)));
4043 rtx copy_size_rtx
4044 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4045 (want_value & 2
4046 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4047 rtx label = 0;
4048
4049 /* Copy that much. */
4050 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4051 TYPE_UNSIGNED (sizetype));
4052 emit_block_move (target, temp, copy_size_rtx,
4053 (want_value & 2
4054 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4055
4056 /* Figure out how much is left in TARGET that we have to clear.
4057 Do all calculations in ptr_mode. */
4058 if (GET_CODE (copy_size_rtx) == CONST_INT)
4059 {
4060 size = plus_constant (size, -INTVAL (copy_size_rtx));
4061 target = adjust_address (target, BLKmode,
4062 INTVAL (copy_size_rtx));
4063 }
4064 else
4065 {
4066 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4067 copy_size_rtx, NULL_RTX, 0,
4068 OPTAB_LIB_WIDEN);
4069
4070 #ifdef POINTERS_EXTEND_UNSIGNED
4071 if (GET_MODE (copy_size_rtx) != Pmode)
4072 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4073 TYPE_UNSIGNED (sizetype));
4074 #endif
4075
4076 target = offset_address (target, copy_size_rtx,
4077 highest_pow2_factor (copy_size));
4078 label = gen_label_rtx ();
4079 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4080 GET_MODE (size), 0, label);
4081 }
4082
4083 if (size != const0_rtx)
4084 clear_storage (target, size);
4085
4086 if (label)
4087 emit_label (label);
4088 }
4089 }
4090 /* Handle calls that return values in multiple non-contiguous locations.
4091 The Irix 6 ABI has examples of this. */
4092 else if (GET_CODE (target) == PARALLEL)
4093 emit_group_load (target, temp, TREE_TYPE (exp),
4094 int_size_in_bytes (TREE_TYPE (exp)));
4095 else if (GET_MODE (temp) == BLKmode)
4096 emit_block_move (target, temp, expr_size (exp),
4097 (want_value & 2
4098 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4099 else
4100 {
4101 temp = force_operand (temp, target);
4102 if (temp != target)
4103 emit_move_insn (target, temp);
4104 }
4105 }
4106
4107 /* If we don't want a value, return NULL_RTX. */
4108 if ((want_value & 1) == 0)
4109 return NULL_RTX;
4110
4111 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4112 ??? The latter test doesn't seem to make sense. */
4113 else if (dont_return_target && !MEM_P (temp))
4114 return temp;
4115
4116 /* Return TARGET itself if it is a hard register. */
4117 else if ((want_value & 1) != 0
4118 && GET_MODE (target) != BLKmode
4119 && ! (REG_P (target)
4120 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4121 return copy_to_reg (target);
4122
4123 else
4124 return target;
4125 }
4126 \f
4127 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4128 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4129 are set to non-constant values and place it in *P_NC_ELTS. */
4130
4131 static void
4132 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4133 HOST_WIDE_INT *p_nc_elts)
4134 {
4135 HOST_WIDE_INT nz_elts, nc_elts;
4136 tree list;
4137
4138 nz_elts = 0;
4139 nc_elts = 0;
4140
4141 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4142 {
4143 tree value = TREE_VALUE (list);
4144 tree purpose = TREE_PURPOSE (list);
4145 HOST_WIDE_INT mult;
4146
4147 mult = 1;
4148 if (TREE_CODE (purpose) == RANGE_EXPR)
4149 {
4150 tree lo_index = TREE_OPERAND (purpose, 0);
4151 tree hi_index = TREE_OPERAND (purpose, 1);
4152
4153 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4154 mult = (tree_low_cst (hi_index, 1)
4155 - tree_low_cst (lo_index, 1) + 1);
4156 }
4157
4158 switch (TREE_CODE (value))
4159 {
4160 case CONSTRUCTOR:
4161 {
4162 HOST_WIDE_INT nz = 0, nc = 0;
4163 categorize_ctor_elements_1 (value, &nz, &nc);
4164 nz_elts += mult * nz;
4165 nc_elts += mult * nc;
4166 }
4167 break;
4168
4169 case INTEGER_CST:
4170 case REAL_CST:
4171 if (!initializer_zerop (value))
4172 nz_elts += mult;
4173 break;
4174 case COMPLEX_CST:
4175 if (!initializer_zerop (TREE_REALPART (value)))
4176 nz_elts += mult;
4177 if (!initializer_zerop (TREE_IMAGPART (value)))
4178 nz_elts += mult;
4179 break;
4180 case VECTOR_CST:
4181 {
4182 tree v;
4183 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4184 if (!initializer_zerop (TREE_VALUE (v)))
4185 nz_elts += mult;
4186 }
4187 break;
4188
4189 default:
4190 nz_elts += mult;
4191 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4192 nc_elts += mult;
4193 break;
4194 }
4195 }
4196
4197 *p_nz_elts += nz_elts;
4198 *p_nc_elts += nc_elts;
4199 }
4200
4201 void
4202 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4203 HOST_WIDE_INT *p_nc_elts)
4204 {
4205 *p_nz_elts = 0;
4206 *p_nc_elts = 0;
4207 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4208 }
4209
4210 /* Count the number of scalars in TYPE. Return -1 on overflow or
4211 variable-sized. */
4212
4213 HOST_WIDE_INT
4214 count_type_elements (tree type)
4215 {
4216 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4217 switch (TREE_CODE (type))
4218 {
4219 case ARRAY_TYPE:
4220 {
4221 tree telts = array_type_nelts (type);
4222 if (telts && host_integerp (telts, 1))
4223 {
4224 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4225 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4226 if (n == 0)
4227 return 0;
4228 else if (max / n > m)
4229 return n * m;
4230 }
4231 return -1;
4232 }
4233
4234 case RECORD_TYPE:
4235 {
4236 HOST_WIDE_INT n = 0, t;
4237 tree f;
4238
4239 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4240 if (TREE_CODE (f) == FIELD_DECL)
4241 {
4242 t = count_type_elements (TREE_TYPE (f));
4243 if (t < 0)
4244 return -1;
4245 n += t;
4246 }
4247
4248 return n;
4249 }
4250
4251 case UNION_TYPE:
4252 case QUAL_UNION_TYPE:
4253 {
4254 /* Ho hum. How in the world do we guess here? Clearly it isn't
4255 right to count the fields. Guess based on the number of words. */
4256 HOST_WIDE_INT n = int_size_in_bytes (type);
4257 if (n < 0)
4258 return -1;
4259 return n / UNITS_PER_WORD;
4260 }
4261
4262 case COMPLEX_TYPE:
4263 return 2;
4264
4265 case VECTOR_TYPE:
4266 return TYPE_VECTOR_SUBPARTS (type);
4267
4268 case INTEGER_TYPE:
4269 case REAL_TYPE:
4270 case ENUMERAL_TYPE:
4271 case BOOLEAN_TYPE:
4272 case CHAR_TYPE:
4273 case POINTER_TYPE:
4274 case OFFSET_TYPE:
4275 case REFERENCE_TYPE:
4276 return 1;
4277
4278 case VOID_TYPE:
4279 case METHOD_TYPE:
4280 case FILE_TYPE:
4281 case SET_TYPE:
4282 case FUNCTION_TYPE:
4283 case LANG_TYPE:
4284 default:
4285 abort ();
4286 }
4287 }
4288
4289 /* Return 1 if EXP contains mostly (3/4) zeros. */
4290
4291 int
4292 mostly_zeros_p (tree exp)
4293 {
4294 if (TREE_CODE (exp) == CONSTRUCTOR)
4295
4296 {
4297 HOST_WIDE_INT nz_elts, nc_elts, elts;
4298
4299 /* If there are no ranges of true bits, it is all zero. */
4300 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4301 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4302
4303 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4304 elts = count_type_elements (TREE_TYPE (exp));
4305
4306 return nz_elts < elts / 4;
4307 }
4308
4309 return initializer_zerop (exp);
4310 }
4311 \f
4312 /* Helper function for store_constructor.
4313 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4314 TYPE is the type of the CONSTRUCTOR, not the element type.
4315 CLEARED is as for store_constructor.
4316 ALIAS_SET is the alias set to use for any stores.
4317
4318 This provides a recursive shortcut back to store_constructor when it isn't
4319 necessary to go through store_field. This is so that we can pass through
4320 the cleared field to let store_constructor know that we may not have to
4321 clear a substructure if the outer structure has already been cleared. */
4322
4323 static void
4324 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4325 HOST_WIDE_INT bitpos, enum machine_mode mode,
4326 tree exp, tree type, int cleared, int alias_set)
4327 {
4328 if (TREE_CODE (exp) == CONSTRUCTOR
4329 /* We can only call store_constructor recursively if the size and
4330 bit position are on a byte boundary. */
4331 && bitpos % BITS_PER_UNIT == 0
4332 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4333 /* If we have a nonzero bitpos for a register target, then we just
4334 let store_field do the bitfield handling. This is unlikely to
4335 generate unnecessary clear instructions anyways. */
4336 && (bitpos == 0 || MEM_P (target)))
4337 {
4338 if (MEM_P (target))
4339 target
4340 = adjust_address (target,
4341 GET_MODE (target) == BLKmode
4342 || 0 != (bitpos
4343 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4344 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4345
4346
4347 /* Update the alias set, if required. */
4348 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4349 && MEM_ALIAS_SET (target) != 0)
4350 {
4351 target = copy_rtx (target);
4352 set_mem_alias_set (target, alias_set);
4353 }
4354
4355 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4356 }
4357 else
4358 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4359 alias_set);
4360 }
4361
4362 /* Store the value of constructor EXP into the rtx TARGET.
4363 TARGET is either a REG or a MEM; we know it cannot conflict, since
4364 safe_from_p has been called.
4365 CLEARED is true if TARGET is known to have been zero'd.
4366 SIZE is the number of bytes of TARGET we are allowed to modify: this
4367 may not be the same as the size of EXP if we are assigning to a field
4368 which has been packed to exclude padding bits. */
4369
4370 static void
4371 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4372 {
4373 tree type = TREE_TYPE (exp);
4374 #ifdef WORD_REGISTER_OPERATIONS
4375 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4376 #endif
4377
4378 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4379 || TREE_CODE (type) == QUAL_UNION_TYPE)
4380 {
4381 tree elt;
4382
4383 /* If size is zero or the target is already cleared, do nothing. */
4384 if (size == 0 || cleared)
4385 cleared = 1;
4386 /* We either clear the aggregate or indicate the value is dead. */
4387 else if ((TREE_CODE (type) == UNION_TYPE
4388 || TREE_CODE (type) == QUAL_UNION_TYPE)
4389 && ! CONSTRUCTOR_ELTS (exp))
4390 /* If the constructor is empty, clear the union. */
4391 {
4392 clear_storage (target, expr_size (exp));
4393 cleared = 1;
4394 }
4395
4396 /* If we are building a static constructor into a register,
4397 set the initial value as zero so we can fold the value into
4398 a constant. But if more than one register is involved,
4399 this probably loses. */
4400 else if (REG_P (target) && TREE_STATIC (exp)
4401 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4402 {
4403 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4404 cleared = 1;
4405 }
4406
4407 /* If the constructor has fewer fields than the structure
4408 or if we are initializing the structure to mostly zeros,
4409 clear the whole structure first. Don't do this if TARGET is a
4410 register whose mode size isn't equal to SIZE since clear_storage
4411 can't handle this case. */
4412 else if (size > 0
4413 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4414 || mostly_zeros_p (exp))
4415 && (!REG_P (target)
4416 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4417 == size)))
4418 {
4419 rtx xtarget = target;
4420
4421 if (readonly_fields_p (type))
4422 {
4423 xtarget = copy_rtx (xtarget);
4424 RTX_UNCHANGING_P (xtarget) = 1;
4425 }
4426
4427 clear_storage (xtarget, GEN_INT (size));
4428 cleared = 1;
4429 }
4430
4431 if (! cleared)
4432 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4433
4434 /* Store each element of the constructor into
4435 the corresponding field of TARGET. */
4436
4437 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4438 {
4439 tree field = TREE_PURPOSE (elt);
4440 tree value = TREE_VALUE (elt);
4441 enum machine_mode mode;
4442 HOST_WIDE_INT bitsize;
4443 HOST_WIDE_INT bitpos = 0;
4444 tree offset;
4445 rtx to_rtx = target;
4446
4447 /* Just ignore missing fields.
4448 We cleared the whole structure, above,
4449 if any fields are missing. */
4450 if (field == 0)
4451 continue;
4452
4453 if (cleared && initializer_zerop (value))
4454 continue;
4455
4456 if (host_integerp (DECL_SIZE (field), 1))
4457 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4458 else
4459 bitsize = -1;
4460
4461 mode = DECL_MODE (field);
4462 if (DECL_BIT_FIELD (field))
4463 mode = VOIDmode;
4464
4465 offset = DECL_FIELD_OFFSET (field);
4466 if (host_integerp (offset, 0)
4467 && host_integerp (bit_position (field), 0))
4468 {
4469 bitpos = int_bit_position (field);
4470 offset = 0;
4471 }
4472 else
4473 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4474
4475 if (offset)
4476 {
4477 rtx offset_rtx;
4478
4479 offset
4480 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4481 make_tree (TREE_TYPE (exp),
4482 target));
4483
4484 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4485 if (!MEM_P (to_rtx))
4486 abort ();
4487
4488 #ifdef POINTERS_EXTEND_UNSIGNED
4489 if (GET_MODE (offset_rtx) != Pmode)
4490 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4491 #else
4492 if (GET_MODE (offset_rtx) != ptr_mode)
4493 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4494 #endif
4495
4496 to_rtx = offset_address (to_rtx, offset_rtx,
4497 highest_pow2_factor (offset));
4498 }
4499
4500 if (TREE_READONLY (field))
4501 {
4502 if (MEM_P (to_rtx))
4503 to_rtx = copy_rtx (to_rtx);
4504
4505 RTX_UNCHANGING_P (to_rtx) = 1;
4506 }
4507
4508 #ifdef WORD_REGISTER_OPERATIONS
4509 /* If this initializes a field that is smaller than a word, at the
4510 start of a word, try to widen it to a full word.
4511 This special case allows us to output C++ member function
4512 initializations in a form that the optimizers can understand. */
4513 if (REG_P (target)
4514 && bitsize < BITS_PER_WORD
4515 && bitpos % BITS_PER_WORD == 0
4516 && GET_MODE_CLASS (mode) == MODE_INT
4517 && TREE_CODE (value) == INTEGER_CST
4518 && exp_size >= 0
4519 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4520 {
4521 tree type = TREE_TYPE (value);
4522
4523 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4524 {
4525 type = lang_hooks.types.type_for_size
4526 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4527 value = convert (type, value);
4528 }
4529
4530 if (BYTES_BIG_ENDIAN)
4531 value
4532 = fold (build2 (LSHIFT_EXPR, type, value,
4533 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4534 bitsize = BITS_PER_WORD;
4535 mode = word_mode;
4536 }
4537 #endif
4538
4539 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4540 && DECL_NONADDRESSABLE_P (field))
4541 {
4542 to_rtx = copy_rtx (to_rtx);
4543 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4544 }
4545
4546 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4547 value, type, cleared,
4548 get_alias_set (TREE_TYPE (field)));
4549 }
4550 }
4551
4552 else if (TREE_CODE (type) == ARRAY_TYPE)
4553 {
4554 tree elt;
4555 int i;
4556 int need_to_clear;
4557 tree domain;
4558 tree elttype = TREE_TYPE (type);
4559 int const_bounds_p;
4560 HOST_WIDE_INT minelt = 0;
4561 HOST_WIDE_INT maxelt = 0;
4562
4563 domain = TYPE_DOMAIN (type);
4564 const_bounds_p = (TYPE_MIN_VALUE (domain)
4565 && TYPE_MAX_VALUE (domain)
4566 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4567 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4568
4569 /* If we have constant bounds for the range of the type, get them. */
4570 if (const_bounds_p)
4571 {
4572 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4573 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4574 }
4575
4576 /* If the constructor has fewer elements than the array,
4577 clear the whole array first. Similarly if this is
4578 static constructor of a non-BLKmode object. */
4579 if (cleared)
4580 need_to_clear = 0;
4581 else if (REG_P (target) && TREE_STATIC (exp))
4582 need_to_clear = 1;
4583 else
4584 {
4585 HOST_WIDE_INT count = 0, zero_count = 0;
4586 need_to_clear = ! const_bounds_p;
4587
4588 /* This loop is a more accurate version of the loop in
4589 mostly_zeros_p (it handles RANGE_EXPR in an index).
4590 It is also needed to check for missing elements. */
4591 for (elt = CONSTRUCTOR_ELTS (exp);
4592 elt != NULL_TREE && ! need_to_clear;
4593 elt = TREE_CHAIN (elt))
4594 {
4595 tree index = TREE_PURPOSE (elt);
4596 HOST_WIDE_INT this_node_count;
4597
4598 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4599 {
4600 tree lo_index = TREE_OPERAND (index, 0);
4601 tree hi_index = TREE_OPERAND (index, 1);
4602
4603 if (! host_integerp (lo_index, 1)
4604 || ! host_integerp (hi_index, 1))
4605 {
4606 need_to_clear = 1;
4607 break;
4608 }
4609
4610 this_node_count = (tree_low_cst (hi_index, 1)
4611 - tree_low_cst (lo_index, 1) + 1);
4612 }
4613 else
4614 this_node_count = 1;
4615
4616 count += this_node_count;
4617 if (mostly_zeros_p (TREE_VALUE (elt)))
4618 zero_count += this_node_count;
4619 }
4620
4621 /* Clear the entire array first if there are any missing elements,
4622 or if the incidence of zero elements is >= 75%. */
4623 if (! need_to_clear
4624 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4625 need_to_clear = 1;
4626 }
4627
4628 if (need_to_clear && size > 0)
4629 {
4630 if (REG_P (target))
4631 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4632 else
4633 clear_storage (target, GEN_INT (size));
4634 cleared = 1;
4635 }
4636
4637 if (!cleared && REG_P (target))
4638 /* Inform later passes that the old value is dead. */
4639 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4640
4641 /* Store each element of the constructor into
4642 the corresponding element of TARGET, determined
4643 by counting the elements. */
4644 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4645 elt;
4646 elt = TREE_CHAIN (elt), i++)
4647 {
4648 enum machine_mode mode;
4649 HOST_WIDE_INT bitsize;
4650 HOST_WIDE_INT bitpos;
4651 int unsignedp;
4652 tree value = TREE_VALUE (elt);
4653 tree index = TREE_PURPOSE (elt);
4654 rtx xtarget = target;
4655
4656 if (cleared && initializer_zerop (value))
4657 continue;
4658
4659 unsignedp = TYPE_UNSIGNED (elttype);
4660 mode = TYPE_MODE (elttype);
4661 if (mode == BLKmode)
4662 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4663 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4664 : -1);
4665 else
4666 bitsize = GET_MODE_BITSIZE (mode);
4667
4668 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4669 {
4670 tree lo_index = TREE_OPERAND (index, 0);
4671 tree hi_index = TREE_OPERAND (index, 1);
4672 rtx index_r, pos_rtx;
4673 HOST_WIDE_INT lo, hi, count;
4674 tree position;
4675
4676 /* If the range is constant and "small", unroll the loop. */
4677 if (const_bounds_p
4678 && host_integerp (lo_index, 0)
4679 && host_integerp (hi_index, 0)
4680 && (lo = tree_low_cst (lo_index, 0),
4681 hi = tree_low_cst (hi_index, 0),
4682 count = hi - lo + 1,
4683 (!MEM_P (target)
4684 || count <= 2
4685 || (host_integerp (TYPE_SIZE (elttype), 1)
4686 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4687 <= 40 * 8)))))
4688 {
4689 lo -= minelt; hi -= minelt;
4690 for (; lo <= hi; lo++)
4691 {
4692 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4693
4694 if (MEM_P (target)
4695 && !MEM_KEEP_ALIAS_SET_P (target)
4696 && TREE_CODE (type) == ARRAY_TYPE
4697 && TYPE_NONALIASED_COMPONENT (type))
4698 {
4699 target = copy_rtx (target);
4700 MEM_KEEP_ALIAS_SET_P (target) = 1;
4701 }
4702
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, cleared,
4705 get_alias_set (elttype));
4706 }
4707 }
4708 else
4709 {
4710 rtx loop_start = gen_label_rtx ();
4711 rtx loop_end = gen_label_rtx ();
4712 tree exit_cond;
4713
4714 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4715 unsignedp = TYPE_UNSIGNED (domain);
4716
4717 index = build_decl (VAR_DECL, NULL_TREE, domain);
4718
4719 index_r
4720 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4721 &unsignedp, 0));
4722 SET_DECL_RTL (index, index_r);
4723 store_expr (lo_index, index_r, 0);
4724
4725 /* Build the head of the loop. */
4726 do_pending_stack_adjust ();
4727 emit_label (loop_start);
4728
4729 /* Assign value to element index. */
4730 position
4731 = convert (ssizetype,
4732 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4733 index, TYPE_MIN_VALUE (domain))));
4734 position = size_binop (MULT_EXPR, position,
4735 convert (ssizetype,
4736 TYPE_SIZE_UNIT (elttype)));
4737
4738 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4739 xtarget = offset_address (target, pos_rtx,
4740 highest_pow2_factor (position));
4741 xtarget = adjust_address (xtarget, mode, 0);
4742 if (TREE_CODE (value) == CONSTRUCTOR)
4743 store_constructor (value, xtarget, cleared,
4744 bitsize / BITS_PER_UNIT);
4745 else
4746 store_expr (value, xtarget, 0);
4747
4748 /* Generate a conditional jump to exit the loop. */
4749 exit_cond = build2 (LT_EXPR, integer_type_node,
4750 index, hi_index);
4751 jumpif (exit_cond, loop_end);
4752
4753 /* Update the loop counter, and jump to the head of
4754 the loop. */
4755 expand_assignment (index,
4756 build2 (PLUS_EXPR, TREE_TYPE (index),
4757 index, integer_one_node), 0);
4758
4759 emit_jump (loop_start);
4760
4761 /* Build the end of the loop. */
4762 emit_label (loop_end);
4763 }
4764 }
4765 else if ((index != 0 && ! host_integerp (index, 0))
4766 || ! host_integerp (TYPE_SIZE (elttype), 1))
4767 {
4768 tree position;
4769
4770 if (index == 0)
4771 index = ssize_int (1);
4772
4773 if (minelt)
4774 index = fold_convert (ssizetype,
4775 fold (build2 (MINUS_EXPR,
4776 TREE_TYPE (index),
4777 index,
4778 TYPE_MIN_VALUE (domain))));
4779
4780 position = size_binop (MULT_EXPR, index,
4781 convert (ssizetype,
4782 TYPE_SIZE_UNIT (elttype)));
4783 xtarget = offset_address (target,
4784 expand_expr (position, 0, VOIDmode, 0),
4785 highest_pow2_factor (position));
4786 xtarget = adjust_address (xtarget, mode, 0);
4787 store_expr (value, xtarget, 0);
4788 }
4789 else
4790 {
4791 if (index != 0)
4792 bitpos = ((tree_low_cst (index, 0) - minelt)
4793 * tree_low_cst (TYPE_SIZE (elttype), 1));
4794 else
4795 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4796
4797 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4798 && TREE_CODE (type) == ARRAY_TYPE
4799 && TYPE_NONALIASED_COMPONENT (type))
4800 {
4801 target = copy_rtx (target);
4802 MEM_KEEP_ALIAS_SET_P (target) = 1;
4803 }
4804 store_constructor_field (target, bitsize, bitpos, mode, value,
4805 type, cleared, get_alias_set (elttype));
4806 }
4807 }
4808 }
4809
4810 else if (TREE_CODE (type) == VECTOR_TYPE)
4811 {
4812 tree elt;
4813 int i;
4814 int need_to_clear;
4815 int icode = 0;
4816 tree elttype = TREE_TYPE (type);
4817 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4818 enum machine_mode eltmode = TYPE_MODE (elttype);
4819 HOST_WIDE_INT bitsize;
4820 HOST_WIDE_INT bitpos;
4821 rtx *vector = NULL;
4822 unsigned n_elts;
4823
4824 if (eltmode == BLKmode)
4825 abort ();
4826
4827 n_elts = TYPE_VECTOR_SUBPARTS (type);
4828 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4829 {
4830 enum machine_mode mode = GET_MODE (target);
4831
4832 icode = (int) vec_init_optab->handlers[mode].insn_code;
4833 if (icode != CODE_FOR_nothing)
4834 {
4835 unsigned int i;
4836
4837 vector = alloca (n_elts);
4838 for (i = 0; i < n_elts; i++)
4839 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4840 }
4841 }
4842
4843 /* If the constructor has fewer elements than the vector,
4844 clear the whole array first. Similarly if this is
4845 static constructor of a non-BLKmode object. */
4846 if (cleared)
4847 need_to_clear = 0;
4848 else if (REG_P (target) && TREE_STATIC (exp))
4849 need_to_clear = 1;
4850 else
4851 {
4852 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4853
4854 for (elt = CONSTRUCTOR_ELTS (exp);
4855 elt != NULL_TREE;
4856 elt = TREE_CHAIN (elt))
4857 {
4858 int n_elts_here =
4859 tree_low_cst (
4860 int_const_binop (TRUNC_DIV_EXPR,
4861 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4862 TYPE_SIZE (elttype), 0), 1);
4863
4864 count += n_elts_here;
4865 if (mostly_zeros_p (TREE_VALUE (elt)))
4866 zero_count += n_elts_here;
4867 }
4868
4869 /* Clear the entire vector first if there are any missing elements,
4870 or if the incidence of zero elements is >= 75%. */
4871 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4872 }
4873
4874 if (need_to_clear && size > 0 && !vector)
4875 {
4876 if (REG_P (target))
4877 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4878 else
4879 clear_storage (target, GEN_INT (size));
4880 cleared = 1;
4881 }
4882
4883 if (!cleared && REG_P (target))
4884 /* Inform later passes that the old value is dead. */
4885 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4886
4887 /* Store each element of the constructor into the corresponding
4888 element of TARGET, determined by counting the elements. */
4889 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4890 elt;
4891 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4892 {
4893 tree value = TREE_VALUE (elt);
4894 tree index = TREE_PURPOSE (elt);
4895 HOST_WIDE_INT eltpos;
4896
4897 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4898 if (cleared && initializer_zerop (value))
4899 continue;
4900
4901 if (index != 0)
4902 eltpos = tree_low_cst (index, 1);
4903 else
4904 eltpos = i;
4905
4906 if (vector)
4907 {
4908 /* Vector CONSTRUCTORs should only be built from smaller
4909 vectors in the case of BLKmode vectors. */
4910 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4911 abort ();
4912 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4913 }
4914 else
4915 {
4916 enum machine_mode value_mode =
4917 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4918 ? TYPE_MODE (TREE_TYPE (value))
4919 : eltmode;
4920 bitpos = eltpos * elt_size;
4921 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4922 type, cleared, get_alias_set (elttype));
4923 }
4924 }
4925
4926 if (vector)
4927 emit_insn (GEN_FCN (icode) (target,
4928 gen_rtx_PARALLEL (GET_MODE (target),
4929 gen_rtvec_v (n_elts, vector))));
4930 }
4931
4932 /* Set constructor assignments. */
4933 else if (TREE_CODE (type) == SET_TYPE)
4934 {
4935 tree elt = CONSTRUCTOR_ELTS (exp);
4936 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4937 tree domain = TYPE_DOMAIN (type);
4938 tree domain_min, domain_max, bitlength;
4939
4940 /* The default implementation strategy is to extract the constant
4941 parts of the constructor, use that to initialize the target,
4942 and then "or" in whatever non-constant ranges we need in addition.
4943
4944 If a large set is all zero or all ones, it is
4945 probably better to set it using memset.
4946 Also, if a large set has just a single range, it may also be
4947 better to first clear all the first clear the set (using
4948 memset), and set the bits we want. */
4949
4950 /* Check for all zeros. */
4951 if (elt == NULL_TREE && size > 0)
4952 {
4953 if (!cleared)
4954 clear_storage (target, GEN_INT (size));
4955 return;
4956 }
4957
4958 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4959 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4960 bitlength = size_binop (PLUS_EXPR,
4961 size_diffop (domain_max, domain_min),
4962 ssize_int (1));
4963
4964 nbits = tree_low_cst (bitlength, 1);
4965
4966 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4967 are "complicated" (more than one range), initialize (the
4968 constant parts) by copying from a constant. */
4969 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4970 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4971 {
4972 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4973 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4974 char *bit_buffer = alloca (nbits);
4975 HOST_WIDE_INT word = 0;
4976 unsigned int bit_pos = 0;
4977 unsigned int ibit = 0;
4978 unsigned int offset = 0; /* In bytes from beginning of set. */
4979
4980 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4981 for (;;)
4982 {
4983 if (bit_buffer[ibit])
4984 {
4985 if (BYTES_BIG_ENDIAN)
4986 word |= (1 << (set_word_size - 1 - bit_pos));
4987 else
4988 word |= 1 << bit_pos;
4989 }
4990
4991 bit_pos++; ibit++;
4992 if (bit_pos >= set_word_size || ibit == nbits)
4993 {
4994 if (word != 0 || ! cleared)
4995 {
4996 rtx datum = gen_int_mode (word, mode);
4997 rtx to_rtx;
4998
4999 /* The assumption here is that it is safe to use
5000 XEXP if the set is multi-word, but not if
5001 it's single-word. */
5002 if (MEM_P (target))
5003 to_rtx = adjust_address (target, mode, offset);
5004 else if (offset == 0)
5005 to_rtx = target;
5006 else
5007 abort ();
5008 emit_move_insn (to_rtx, datum);
5009 }
5010
5011 if (ibit == nbits)
5012 break;
5013 word = 0;
5014 bit_pos = 0;
5015 offset += set_word_size / BITS_PER_UNIT;
5016 }
5017 }
5018 }
5019 else if (!cleared)
5020 /* Don't bother clearing storage if the set is all ones. */
5021 if (TREE_CHAIN (elt) != NULL_TREE
5022 || (TREE_PURPOSE (elt) == NULL_TREE
5023 ? nbits != 1
5024 : ( ! host_integerp (TREE_VALUE (elt), 0)
5025 || ! host_integerp (TREE_PURPOSE (elt), 0)
5026 || (tree_low_cst (TREE_VALUE (elt), 0)
5027 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5028 != (HOST_WIDE_INT) nbits))))
5029 clear_storage (target, expr_size (exp));
5030
5031 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5032 {
5033 /* Start of range of element or NULL. */
5034 tree startbit = TREE_PURPOSE (elt);
5035 /* End of range of element, or element value. */
5036 tree endbit = TREE_VALUE (elt);
5037 HOST_WIDE_INT startb, endb;
5038 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5039
5040 bitlength_rtx = expand_expr (bitlength,
5041 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5042
5043 /* Handle non-range tuple element like [ expr ]. */
5044 if (startbit == NULL_TREE)
5045 {
5046 startbit = save_expr (endbit);
5047 endbit = startbit;
5048 }
5049
5050 startbit = convert (sizetype, startbit);
5051 endbit = convert (sizetype, endbit);
5052 if (! integer_zerop (domain_min))
5053 {
5054 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5055 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5056 }
5057 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5059 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5060 EXPAND_CONST_ADDRESS);
5061
5062 if (REG_P (target))
5063 {
5064 targetx
5065 = assign_temp
5066 ((build_qualified_type (lang_hooks.types.type_for_mode
5067 (GET_MODE (target), 0),
5068 TYPE_QUAL_CONST)),
5069 0, 1, 1);
5070 emit_move_insn (targetx, target);
5071 }
5072
5073 else if (MEM_P (target))
5074 targetx = target;
5075 else
5076 abort ();
5077
5078 /* Optimization: If startbit and endbit are constants divisible
5079 by BITS_PER_UNIT, call memset instead. */
5080 if (TREE_CODE (startbit) == INTEGER_CST
5081 && TREE_CODE (endbit) == INTEGER_CST
5082 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5083 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5084 {
5085 emit_library_call (memset_libfunc, LCT_NORMAL,
5086 VOIDmode, 3,
5087 plus_constant (XEXP (targetx, 0),
5088 startb / BITS_PER_UNIT),
5089 Pmode,
5090 constm1_rtx, TYPE_MODE (integer_type_node),
5091 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5092 TYPE_MODE (sizetype));
5093 }
5094 else
5095 emit_library_call (setbits_libfunc, LCT_NORMAL,
5096 VOIDmode, 4, XEXP (targetx, 0),
5097 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5098 startbit_rtx, TYPE_MODE (sizetype),
5099 endbit_rtx, TYPE_MODE (sizetype));
5100
5101 if (REG_P (target))
5102 emit_move_insn (target, targetx);
5103 }
5104 }
5105
5106 else
5107 abort ();
5108 }
5109
5110 /* Store the value of EXP (an expression tree)
5111 into a subfield of TARGET which has mode MODE and occupies
5112 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5113 If MODE is VOIDmode, it means that we are storing into a bit-field.
5114
5115 If VALUE_MODE is VOIDmode, return nothing in particular.
5116 UNSIGNEDP is not used in this case.
5117
5118 Otherwise, return an rtx for the value stored. This rtx
5119 has mode VALUE_MODE if that is convenient to do.
5120 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5121
5122 TYPE is the type of the underlying object,
5123
5124 ALIAS_SET is the alias set for the destination. This value will
5125 (in general) be different from that for TARGET, since TARGET is a
5126 reference to the containing structure. */
5127
5128 static rtx
5129 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5130 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5131 int unsignedp, tree type, int alias_set)
5132 {
5133 HOST_WIDE_INT width_mask = 0;
5134
5135 if (TREE_CODE (exp) == ERROR_MARK)
5136 return const0_rtx;
5137
5138 /* If we have nothing to store, do nothing unless the expression has
5139 side-effects. */
5140 if (bitsize == 0)
5141 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5142 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5143 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5144
5145 /* If we are storing into an unaligned field of an aligned union that is
5146 in a register, we may have the mode of TARGET being an integer mode but
5147 MODE == BLKmode. In that case, get an aligned object whose size and
5148 alignment are the same as TARGET and store TARGET into it (we can avoid
5149 the store if the field being stored is the entire width of TARGET). Then
5150 call ourselves recursively to store the field into a BLKmode version of
5151 that object. Finally, load from the object into TARGET. This is not
5152 very efficient in general, but should only be slightly more expensive
5153 than the otherwise-required unaligned accesses. Perhaps this can be
5154 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5155 twice, once with emit_move_insn and once via store_field. */
5156
5157 if (mode == BLKmode
5158 && (REG_P (target) || GET_CODE (target) == SUBREG))
5159 {
5160 rtx object = assign_temp (type, 0, 1, 1);
5161 rtx blk_object = adjust_address (object, BLKmode, 0);
5162
5163 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5164 emit_move_insn (object, target);
5165
5166 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5167 alias_set);
5168
5169 emit_move_insn (target, object);
5170
5171 /* We want to return the BLKmode version of the data. */
5172 return blk_object;
5173 }
5174
5175 if (GET_CODE (target) == CONCAT)
5176 {
5177 /* We're storing into a struct containing a single __complex. */
5178
5179 if (bitpos != 0)
5180 abort ();
5181 return store_expr (exp, target, value_mode != VOIDmode);
5182 }
5183
5184 /* If the structure is in a register or if the component
5185 is a bit field, we cannot use addressing to access it.
5186 Use bit-field techniques or SUBREG to store in it. */
5187
5188 if (mode == VOIDmode
5189 || (mode != BLKmode && ! direct_store[(int) mode]
5190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5191 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5192 || REG_P (target)
5193 || GET_CODE (target) == SUBREG
5194 /* If the field isn't aligned enough to store as an ordinary memref,
5195 store it as a bit field. */
5196 || (mode != BLKmode
5197 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5198 || bitpos % GET_MODE_ALIGNMENT (mode))
5199 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5200 || (bitpos % BITS_PER_UNIT != 0)))
5201 /* If the RHS and field are a constant size and the size of the
5202 RHS isn't the same size as the bitfield, we must use bitfield
5203 operations. */
5204 || (bitsize >= 0
5205 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5206 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5207 {
5208 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5209
5210 /* If BITSIZE is narrower than the size of the type of EXP
5211 we will be narrowing TEMP. Normally, what's wanted are the
5212 low-order bits. However, if EXP's type is a record and this is
5213 big-endian machine, we want the upper BITSIZE bits. */
5214 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5215 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5216 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5217 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5218 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5219 - bitsize),
5220 NULL_RTX, 1);
5221
5222 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5223 MODE. */
5224 if (mode != VOIDmode && mode != BLKmode
5225 && mode != TYPE_MODE (TREE_TYPE (exp)))
5226 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5227
5228 /* If the modes of TARGET and TEMP are both BLKmode, both
5229 must be in memory and BITPOS must be aligned on a byte
5230 boundary. If so, we simply do a block copy. */
5231 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5232 {
5233 if (!MEM_P (target) || !MEM_P (temp)
5234 || bitpos % BITS_PER_UNIT != 0)
5235 abort ();
5236
5237 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5238 emit_block_move (target, temp,
5239 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5240 / BITS_PER_UNIT),
5241 BLOCK_OP_NORMAL);
5242
5243 return value_mode == VOIDmode ? const0_rtx : target;
5244 }
5245
5246 /* Store the value in the bitfield. */
5247 store_bit_field (target, bitsize, bitpos, mode, temp);
5248
5249 if (value_mode != VOIDmode)
5250 {
5251 /* The caller wants an rtx for the value.
5252 If possible, avoid refetching from the bitfield itself. */
5253 if (width_mask != 0
5254 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5255 {
5256 tree count;
5257 enum machine_mode tmode;
5258
5259 tmode = GET_MODE (temp);
5260 if (tmode == VOIDmode)
5261 tmode = value_mode;
5262
5263 if (unsignedp)
5264 return expand_and (tmode, temp,
5265 gen_int_mode (width_mask, tmode),
5266 NULL_RTX);
5267
5268 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5269 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5270 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5271 }
5272
5273 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5274 NULL_RTX, value_mode, VOIDmode);
5275 }
5276 return const0_rtx;
5277 }
5278 else
5279 {
5280 rtx addr = XEXP (target, 0);
5281 rtx to_rtx = target;
5282
5283 /* If a value is wanted, it must be the lhs;
5284 so make the address stable for multiple use. */
5285
5286 if (value_mode != VOIDmode && !REG_P (addr)
5287 && ! CONSTANT_ADDRESS_P (addr)
5288 /* A frame-pointer reference is already stable. */
5289 && ! (GET_CODE (addr) == PLUS
5290 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5291 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5292 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5293 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5294
5295 /* Now build a reference to just the desired component. */
5296
5297 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5298
5299 if (to_rtx == target)
5300 to_rtx = copy_rtx (to_rtx);
5301
5302 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5303 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5304 set_mem_alias_set (to_rtx, alias_set);
5305
5306 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5307 }
5308 }
5309 \f
5310 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5311 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5312 codes and find the ultimate containing object, which we return.
5313
5314 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5315 bit position, and *PUNSIGNEDP to the signedness of the field.
5316 If the position of the field is variable, we store a tree
5317 giving the variable offset (in units) in *POFFSET.
5318 This offset is in addition to the bit position.
5319 If the position is not variable, we store 0 in *POFFSET.
5320
5321 If any of the extraction expressions is volatile,
5322 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5323
5324 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5325 is a mode that can be used to access the field. In that case, *PBITSIZE
5326 is redundant.
5327
5328 If the field describes a variable-sized object, *PMODE is set to
5329 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5330 this case, but the address of the object can be found. */
5331
5332 tree
5333 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5334 HOST_WIDE_INT *pbitpos, tree *poffset,
5335 enum machine_mode *pmode, int *punsignedp,
5336 int *pvolatilep)
5337 {
5338 tree size_tree = 0;
5339 enum machine_mode mode = VOIDmode;
5340 tree offset = size_zero_node;
5341 tree bit_offset = bitsize_zero_node;
5342 tree tem;
5343
5344 /* First get the mode, signedness, and size. We do this from just the
5345 outermost expression. */
5346 if (TREE_CODE (exp) == COMPONENT_REF)
5347 {
5348 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5349 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5350 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5351
5352 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5353 }
5354 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5355 {
5356 size_tree = TREE_OPERAND (exp, 1);
5357 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5358 }
5359 else
5360 {
5361 mode = TYPE_MODE (TREE_TYPE (exp));
5362 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5363
5364 if (mode == BLKmode)
5365 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5366 else
5367 *pbitsize = GET_MODE_BITSIZE (mode);
5368 }
5369
5370 if (size_tree != 0)
5371 {
5372 if (! host_integerp (size_tree, 1))
5373 mode = BLKmode, *pbitsize = -1;
5374 else
5375 *pbitsize = tree_low_cst (size_tree, 1);
5376 }
5377
5378 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5379 and find the ultimate containing object. */
5380 while (1)
5381 {
5382 if (TREE_CODE (exp) == BIT_FIELD_REF)
5383 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5384 else if (TREE_CODE (exp) == COMPONENT_REF)
5385 {
5386 tree field = TREE_OPERAND (exp, 1);
5387 tree this_offset = component_ref_field_offset (exp);
5388
5389 /* If this field hasn't been filled in yet, don't go
5390 past it. This should only happen when folding expressions
5391 made during type construction. */
5392 if (this_offset == 0)
5393 break;
5394
5395 offset = size_binop (PLUS_EXPR, offset, this_offset);
5396 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5397 DECL_FIELD_BIT_OFFSET (field));
5398
5399 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5400 }
5401
5402 else if (TREE_CODE (exp) == ARRAY_REF
5403 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5404 {
5405 tree index = TREE_OPERAND (exp, 1);
5406 tree low_bound = array_ref_low_bound (exp);
5407 tree unit_size = array_ref_element_size (exp);
5408
5409 /* We assume all arrays have sizes that are a multiple of a byte.
5410 First subtract the lower bound, if any, in the type of the
5411 index, then convert to sizetype and multiply by the size of the
5412 array element. */
5413 if (! integer_zerop (low_bound))
5414 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5415 index, low_bound));
5416
5417 offset = size_binop (PLUS_EXPR, offset,
5418 size_binop (MULT_EXPR,
5419 convert (sizetype, index),
5420 unit_size));
5421 }
5422
5423 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5424 conversions that don't change the mode, and all view conversions
5425 except those that need to "step up" the alignment. */
5426 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5427 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5428 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5429 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5430 && STRICT_ALIGNMENT
5431 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5432 < BIGGEST_ALIGNMENT)
5433 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5434 || TYPE_ALIGN_OK (TREE_TYPE
5435 (TREE_OPERAND (exp, 0))))))
5436 && ! ((TREE_CODE (exp) == NOP_EXPR
5437 || TREE_CODE (exp) == CONVERT_EXPR)
5438 && (TYPE_MODE (TREE_TYPE (exp))
5439 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5440 break;
5441
5442 /* If any reference in the chain is volatile, the effect is volatile. */
5443 if (TREE_THIS_VOLATILE (exp))
5444 *pvolatilep = 1;
5445
5446 exp = TREE_OPERAND (exp, 0);
5447 }
5448
5449 /* If OFFSET is constant, see if we can return the whole thing as a
5450 constant bit position. Otherwise, split it up. */
5451 if (host_integerp (offset, 0)
5452 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5453 bitsize_unit_node))
5454 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5455 && host_integerp (tem, 0))
5456 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5457 else
5458 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5459
5460 *pmode = mode;
5461 return exp;
5462 }
5463
5464 /* Return a tree of sizetype representing the size, in bytes, of the element
5465 of EXP, an ARRAY_REF. */
5466
5467 tree
5468 array_ref_element_size (tree exp)
5469 {
5470 tree aligned_size = TREE_OPERAND (exp, 3);
5471 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5472
5473 /* If a size was specified in the ARRAY_REF, it's the size measured
5474 in alignment units of the element type. So multiply by that value. */
5475 if (aligned_size)
5476 return size_binop (MULT_EXPR, aligned_size,
5477 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5478
5479 /* Otherwise, take the size from that of the element type. Substitute
5480 any PLACEHOLDER_EXPR that we have. */
5481 else
5482 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5483 }
5484
5485 /* Return a tree representing the lower bound of the array mentioned in
5486 EXP, an ARRAY_REF. */
5487
5488 tree
5489 array_ref_low_bound (tree exp)
5490 {
5491 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5492
5493 /* If a lower bound is specified in EXP, use it. */
5494 if (TREE_OPERAND (exp, 2))
5495 return TREE_OPERAND (exp, 2);
5496
5497 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5498 substituting for a PLACEHOLDER_EXPR as needed. */
5499 if (domain_type && TYPE_MIN_VALUE (domain_type))
5500 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5501
5502 /* Otherwise, return a zero of the appropriate type. */
5503 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5504 }
5505
5506 /* Return a tree representing the upper bound of the array mentioned in
5507 EXP, an ARRAY_REF. */
5508
5509 tree
5510 array_ref_up_bound (tree exp)
5511 {
5512 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5513
5514 /* If there is a domain type and it has an upper bound, use it, substituting
5515 for a PLACEHOLDER_EXPR as needed. */
5516 if (domain_type && TYPE_MAX_VALUE (domain_type))
5517 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5518
5519 /* Otherwise fail. */
5520 return NULL_TREE;
5521 }
5522
5523 /* Return a tree representing the offset, in bytes, of the field referenced
5524 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5525
5526 tree
5527 component_ref_field_offset (tree exp)
5528 {
5529 tree aligned_offset = TREE_OPERAND (exp, 2);
5530 tree field = TREE_OPERAND (exp, 1);
5531
5532 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5533 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5534 value. */
5535 if (aligned_offset)
5536 return size_binop (MULT_EXPR, aligned_offset,
5537 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5538
5539 /* Otherwise, take the offset from that of the field. Substitute
5540 any PLACEHOLDER_EXPR that we have. */
5541 else
5542 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5543 }
5544
5545 /* Return 1 if T is an expression that get_inner_reference handles. */
5546
5547 int
5548 handled_component_p (tree t)
5549 {
5550 switch (TREE_CODE (t))
5551 {
5552 case BIT_FIELD_REF:
5553 case COMPONENT_REF:
5554 case ARRAY_REF:
5555 case ARRAY_RANGE_REF:
5556 case NON_LVALUE_EXPR:
5557 case VIEW_CONVERT_EXPR:
5558 return 1;
5559
5560 /* ??? Sure they are handled, but get_inner_reference may return
5561 a different PBITSIZE, depending upon whether the expression is
5562 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5563 case NOP_EXPR:
5564 case CONVERT_EXPR:
5565 return (TYPE_MODE (TREE_TYPE (t))
5566 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5567
5568 default:
5569 return 0;
5570 }
5571 }
5572 \f
5573 /* Given an rtx VALUE that may contain additions and multiplications, return
5574 an equivalent value that just refers to a register, memory, or constant.
5575 This is done by generating instructions to perform the arithmetic and
5576 returning a pseudo-register containing the value.
5577
5578 The returned value may be a REG, SUBREG, MEM or constant. */
5579
5580 rtx
5581 force_operand (rtx value, rtx target)
5582 {
5583 rtx op1, op2;
5584 /* Use subtarget as the target for operand 0 of a binary operation. */
5585 rtx subtarget = get_subtarget (target);
5586 enum rtx_code code = GET_CODE (value);
5587
5588 /* Check for subreg applied to an expression produced by loop optimizer. */
5589 if (code == SUBREG
5590 && !REG_P (SUBREG_REG (value))
5591 && !MEM_P (SUBREG_REG (value)))
5592 {
5593 value = simplify_gen_subreg (GET_MODE (value),
5594 force_reg (GET_MODE (SUBREG_REG (value)),
5595 force_operand (SUBREG_REG (value),
5596 NULL_RTX)),
5597 GET_MODE (SUBREG_REG (value)),
5598 SUBREG_BYTE (value));
5599 code = GET_CODE (value);
5600 }
5601
5602 /* Check for a PIC address load. */
5603 if ((code == PLUS || code == MINUS)
5604 && XEXP (value, 0) == pic_offset_table_rtx
5605 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5606 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5607 || GET_CODE (XEXP (value, 1)) == CONST))
5608 {
5609 if (!subtarget)
5610 subtarget = gen_reg_rtx (GET_MODE (value));
5611 emit_move_insn (subtarget, value);
5612 return subtarget;
5613 }
5614
5615 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5616 {
5617 if (!target)
5618 target = gen_reg_rtx (GET_MODE (value));
5619 convert_move (target, force_operand (XEXP (value, 0), NULL),
5620 code == ZERO_EXTEND);
5621 return target;
5622 }
5623
5624 if (ARITHMETIC_P (value))
5625 {
5626 op2 = XEXP (value, 1);
5627 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5628 subtarget = 0;
5629 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5630 {
5631 code = PLUS;
5632 op2 = negate_rtx (GET_MODE (value), op2);
5633 }
5634
5635 /* Check for an addition with OP2 a constant integer and our first
5636 operand a PLUS of a virtual register and something else. In that
5637 case, we want to emit the sum of the virtual register and the
5638 constant first and then add the other value. This allows virtual
5639 register instantiation to simply modify the constant rather than
5640 creating another one around this addition. */
5641 if (code == PLUS && GET_CODE (op2) == CONST_INT
5642 && GET_CODE (XEXP (value, 0)) == PLUS
5643 && REG_P (XEXP (XEXP (value, 0), 0))
5644 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5645 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5646 {
5647 rtx temp = expand_simple_binop (GET_MODE (value), code,
5648 XEXP (XEXP (value, 0), 0), op2,
5649 subtarget, 0, OPTAB_LIB_WIDEN);
5650 return expand_simple_binop (GET_MODE (value), code, temp,
5651 force_operand (XEXP (XEXP (value,
5652 0), 1), 0),
5653 target, 0, OPTAB_LIB_WIDEN);
5654 }
5655
5656 op1 = force_operand (XEXP (value, 0), subtarget);
5657 op2 = force_operand (op2, NULL_RTX);
5658 switch (code)
5659 {
5660 case MULT:
5661 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5662 case DIV:
5663 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5664 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5665 target, 1, OPTAB_LIB_WIDEN);
5666 else
5667 return expand_divmod (0,
5668 FLOAT_MODE_P (GET_MODE (value))
5669 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5670 GET_MODE (value), op1, op2, target, 0);
5671 break;
5672 case MOD:
5673 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5674 target, 0);
5675 break;
5676 case UDIV:
5677 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5678 target, 1);
5679 break;
5680 case UMOD:
5681 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5682 target, 1);
5683 break;
5684 case ASHIFTRT:
5685 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5686 target, 0, OPTAB_LIB_WIDEN);
5687 break;
5688 default:
5689 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5690 target, 1, OPTAB_LIB_WIDEN);
5691 }
5692 }
5693 if (UNARY_P (value))
5694 {
5695 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5696 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5697 }
5698
5699 #ifdef INSN_SCHEDULING
5700 /* On machines that have insn scheduling, we want all memory reference to be
5701 explicit, so we need to deal with such paradoxical SUBREGs. */
5702 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5703 && (GET_MODE_SIZE (GET_MODE (value))
5704 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5705 value
5706 = simplify_gen_subreg (GET_MODE (value),
5707 force_reg (GET_MODE (SUBREG_REG (value)),
5708 force_operand (SUBREG_REG (value),
5709 NULL_RTX)),
5710 GET_MODE (SUBREG_REG (value)),
5711 SUBREG_BYTE (value));
5712 #endif
5713
5714 return value;
5715 }
5716 \f
5717 /* Subroutine of expand_expr: return nonzero iff there is no way that
5718 EXP can reference X, which is being modified. TOP_P is nonzero if this
5719 call is going to be used to determine whether we need a temporary
5720 for EXP, as opposed to a recursive call to this function.
5721
5722 It is always safe for this routine to return zero since it merely
5723 searches for optimization opportunities. */
5724
5725 int
5726 safe_from_p (rtx x, tree exp, int top_p)
5727 {
5728 rtx exp_rtl = 0;
5729 int i, nops;
5730
5731 if (x == 0
5732 /* If EXP has varying size, we MUST use a target since we currently
5733 have no way of allocating temporaries of variable size
5734 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5735 So we assume here that something at a higher level has prevented a
5736 clash. This is somewhat bogus, but the best we can do. Only
5737 do this when X is BLKmode and when we are at the top level. */
5738 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5739 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5740 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5741 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5742 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5743 != INTEGER_CST)
5744 && GET_MODE (x) == BLKmode)
5745 /* If X is in the outgoing argument area, it is always safe. */
5746 || (MEM_P (x)
5747 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5748 || (GET_CODE (XEXP (x, 0)) == PLUS
5749 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5750 return 1;
5751
5752 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5753 find the underlying pseudo. */
5754 if (GET_CODE (x) == SUBREG)
5755 {
5756 x = SUBREG_REG (x);
5757 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5758 return 0;
5759 }
5760
5761 /* Now look at our tree code and possibly recurse. */
5762 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5763 {
5764 case 'd':
5765 exp_rtl = DECL_RTL_IF_SET (exp);
5766 break;
5767
5768 case 'c':
5769 return 1;
5770
5771 case 'x':
5772 if (TREE_CODE (exp) == TREE_LIST)
5773 {
5774 while (1)
5775 {
5776 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5777 return 0;
5778 exp = TREE_CHAIN (exp);
5779 if (!exp)
5780 return 1;
5781 if (TREE_CODE (exp) != TREE_LIST)
5782 return safe_from_p (x, exp, 0);
5783 }
5784 }
5785 else if (TREE_CODE (exp) == ERROR_MARK)
5786 return 1; /* An already-visited SAVE_EXPR? */
5787 else
5788 return 0;
5789
5790 case 's':
5791 /* The only case we look at here is the DECL_INITIAL inside a
5792 DECL_EXPR. */
5793 return (TREE_CODE (exp) != DECL_EXPR
5794 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5795 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5796 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5797
5798 case '2':
5799 case '<':
5800 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5801 return 0;
5802 /* Fall through. */
5803
5804 case '1':
5805 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5806
5807 case 'e':
5808 case 'r':
5809 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5810 the expression. If it is set, we conflict iff we are that rtx or
5811 both are in memory. Otherwise, we check all operands of the
5812 expression recursively. */
5813
5814 switch (TREE_CODE (exp))
5815 {
5816 case ADDR_EXPR:
5817 /* If the operand is static or we are static, we can't conflict.
5818 Likewise if we don't conflict with the operand at all. */
5819 if (staticp (TREE_OPERAND (exp, 0))
5820 || TREE_STATIC (exp)
5821 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5822 return 1;
5823
5824 /* Otherwise, the only way this can conflict is if we are taking
5825 the address of a DECL a that address if part of X, which is
5826 very rare. */
5827 exp = TREE_OPERAND (exp, 0);
5828 if (DECL_P (exp))
5829 {
5830 if (!DECL_RTL_SET_P (exp)
5831 || !MEM_P (DECL_RTL (exp)))
5832 return 0;
5833 else
5834 exp_rtl = XEXP (DECL_RTL (exp), 0);
5835 }
5836 break;
5837
5838 case INDIRECT_REF:
5839 if (MEM_P (x)
5840 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5841 get_alias_set (exp)))
5842 return 0;
5843 break;
5844
5845 case CALL_EXPR:
5846 /* Assume that the call will clobber all hard registers and
5847 all of memory. */
5848 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5849 || MEM_P (x))
5850 return 0;
5851 break;
5852
5853 case WITH_CLEANUP_EXPR:
5854 case CLEANUP_POINT_EXPR:
5855 /* Lowered by gimplify.c. */
5856 abort ();
5857
5858 case SAVE_EXPR:
5859 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5860
5861 default:
5862 break;
5863 }
5864
5865 /* If we have an rtx, we do not need to scan our operands. */
5866 if (exp_rtl)
5867 break;
5868
5869 nops = first_rtl_op (TREE_CODE (exp));
5870 for (i = 0; i < nops; i++)
5871 if (TREE_OPERAND (exp, i) != 0
5872 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5873 return 0;
5874
5875 /* If this is a language-specific tree code, it may require
5876 special handling. */
5877 if ((unsigned int) TREE_CODE (exp)
5878 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5879 && !lang_hooks.safe_from_p (x, exp))
5880 return 0;
5881 }
5882
5883 /* If we have an rtl, find any enclosed object. Then see if we conflict
5884 with it. */
5885 if (exp_rtl)
5886 {
5887 if (GET_CODE (exp_rtl) == SUBREG)
5888 {
5889 exp_rtl = SUBREG_REG (exp_rtl);
5890 if (REG_P (exp_rtl)
5891 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5892 return 0;
5893 }
5894
5895 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5896 are memory and they conflict. */
5897 return ! (rtx_equal_p (x, exp_rtl)
5898 || (MEM_P (x) && MEM_P (exp_rtl)
5899 && true_dependence (exp_rtl, VOIDmode, x,
5900 rtx_addr_varies_p)));
5901 }
5902
5903 /* If we reach here, it is safe. */
5904 return 1;
5905 }
5906
5907 /* Subroutine of expand_expr: return rtx if EXP is a
5908 variable or parameter; else return 0. */
5909
5910 static rtx
5911 var_rtx (tree exp)
5912 {
5913 STRIP_NOPS (exp);
5914 switch (TREE_CODE (exp))
5915 {
5916 case PARM_DECL:
5917 case VAR_DECL:
5918 return DECL_RTL (exp);
5919 default:
5920 return 0;
5921 }
5922 }
5923 \f
5924 /* Return the highest power of two that EXP is known to be a multiple of.
5925 This is used in updating alignment of MEMs in array references. */
5926
5927 static unsigned HOST_WIDE_INT
5928 highest_pow2_factor (tree exp)
5929 {
5930 unsigned HOST_WIDE_INT c0, c1;
5931
5932 switch (TREE_CODE (exp))
5933 {
5934 case INTEGER_CST:
5935 /* We can find the lowest bit that's a one. If the low
5936 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5937 We need to handle this case since we can find it in a COND_EXPR,
5938 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5939 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5940 later ICE. */
5941 if (TREE_CONSTANT_OVERFLOW (exp))
5942 return BIGGEST_ALIGNMENT;
5943 else
5944 {
5945 /* Note: tree_low_cst is intentionally not used here,
5946 we don't care about the upper bits. */
5947 c0 = TREE_INT_CST_LOW (exp);
5948 c0 &= -c0;
5949 return c0 ? c0 : BIGGEST_ALIGNMENT;
5950 }
5951 break;
5952
5953 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5954 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5955 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5956 return MIN (c0, c1);
5957
5958 case MULT_EXPR:
5959 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5960 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5961 return c0 * c1;
5962
5963 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5964 case CEIL_DIV_EXPR:
5965 if (integer_pow2p (TREE_OPERAND (exp, 1))
5966 && host_integerp (TREE_OPERAND (exp, 1), 1))
5967 {
5968 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5969 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5970 return MAX (1, c0 / c1);
5971 }
5972 break;
5973
5974 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5975 case SAVE_EXPR:
5976 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5977
5978 case COMPOUND_EXPR:
5979 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5980
5981 case COND_EXPR:
5982 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5983 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5984 return MIN (c0, c1);
5985
5986 default:
5987 break;
5988 }
5989
5990 return 1;
5991 }
5992
5993 /* Similar, except that the alignment requirements of TARGET are
5994 taken into account. Assume it is at least as aligned as its
5995 type, unless it is a COMPONENT_REF in which case the layout of
5996 the structure gives the alignment. */
5997
5998 static unsigned HOST_WIDE_INT
5999 highest_pow2_factor_for_target (tree target, tree exp)
6000 {
6001 unsigned HOST_WIDE_INT target_align, factor;
6002
6003 factor = highest_pow2_factor (exp);
6004 if (TREE_CODE (target) == COMPONENT_REF)
6005 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6006 else
6007 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6008 return MAX (factor, target_align);
6009 }
6010 \f
6011 /* Expands variable VAR. */
6012
6013 void
6014 expand_var (tree var)
6015 {
6016 if (DECL_EXTERNAL (var))
6017 return;
6018
6019 if (TREE_STATIC (var))
6020 /* If this is an inlined copy of a static local variable,
6021 look up the original decl. */
6022 var = DECL_ORIGIN (var);
6023
6024 if (TREE_STATIC (var)
6025 ? !TREE_ASM_WRITTEN (var)
6026 : !DECL_RTL_SET_P (var))
6027 {
6028 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6029 {
6030 /* Prepare a mem & address for the decl. */
6031 rtx x;
6032
6033 if (TREE_STATIC (var))
6034 abort ();
6035
6036 x = gen_rtx_MEM (DECL_MODE (var),
6037 gen_reg_rtx (Pmode));
6038
6039 set_mem_attributes (x, var, 1);
6040 SET_DECL_RTL (var, x);
6041 }
6042 else if (lang_hooks.expand_decl (var))
6043 /* OK. */;
6044 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6045 expand_decl (var);
6046 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6047 rest_of_decl_compilation (var, NULL, 0, 0);
6048 else if (TREE_CODE (var) == TYPE_DECL
6049 || TREE_CODE (var) == CONST_DECL
6050 || TREE_CODE (var) == FUNCTION_DECL
6051 || TREE_CODE (var) == LABEL_DECL)
6052 /* No expansion needed. */;
6053 else
6054 abort ();
6055 }
6056 }
6057
6058 /* Subroutine of expand_expr. Expand the two operands of a binary
6059 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6060 The value may be stored in TARGET if TARGET is nonzero. The
6061 MODIFIER argument is as documented by expand_expr. */
6062
6063 static void
6064 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6065 enum expand_modifier modifier)
6066 {
6067 if (! safe_from_p (target, exp1, 1))
6068 target = 0;
6069 if (operand_equal_p (exp0, exp1, 0))
6070 {
6071 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6072 *op1 = copy_rtx (*op0);
6073 }
6074 else
6075 {
6076 /* If we need to preserve evaluation order, copy exp0 into its own
6077 temporary variable so that it can't be clobbered by exp1. */
6078 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6079 exp0 = save_expr (exp0);
6080 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6081 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6082 }
6083 }
6084
6085 \f
6086 /* expand_expr: generate code for computing expression EXP.
6087 An rtx for the computed value is returned. The value is never null.
6088 In the case of a void EXP, const0_rtx is returned.
6089
6090 The value may be stored in TARGET if TARGET is nonzero.
6091 TARGET is just a suggestion; callers must assume that
6092 the rtx returned may not be the same as TARGET.
6093
6094 If TARGET is CONST0_RTX, it means that the value will be ignored.
6095
6096 If TMODE is not VOIDmode, it suggests generating the
6097 result in mode TMODE. But this is done only when convenient.
6098 Otherwise, TMODE is ignored and the value generated in its natural mode.
6099 TMODE is just a suggestion; callers must assume that
6100 the rtx returned may not have mode TMODE.
6101
6102 Note that TARGET may have neither TMODE nor MODE. In that case, it
6103 probably will not be used.
6104
6105 If MODIFIER is EXPAND_SUM then when EXP is an addition
6106 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6107 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6108 products as above, or REG or MEM, or constant.
6109 Ordinarily in such cases we would output mul or add instructions
6110 and then return a pseudo reg containing the sum.
6111
6112 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6113 it also marks a label as absolutely required (it can't be dead).
6114 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6115 This is used for outputting expressions used in initializers.
6116
6117 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6118 with a constant address even if that address is not normally legitimate.
6119 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6120
6121 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6122 a call parameter. Such targets require special care as we haven't yet
6123 marked TARGET so that it's safe from being trashed by libcalls. We
6124 don't want to use TARGET for anything but the final result;
6125 Intermediate values must go elsewhere. Additionally, calls to
6126 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6127
6128 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6129 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6130 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6131 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6132 recursively. */
6133
6134 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6135 enum expand_modifier, rtx *);
6136
6137 rtx
6138 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6139 enum expand_modifier modifier, rtx *alt_rtl)
6140 {
6141 int rn = -1;
6142 rtx ret, last = NULL;
6143
6144 /* Handle ERROR_MARK before anybody tries to access its type. */
6145 if (TREE_CODE (exp) == ERROR_MARK
6146 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6147 {
6148 ret = CONST0_RTX (tmode);
6149 return ret ? ret : const0_rtx;
6150 }
6151
6152 if (flag_non_call_exceptions)
6153 {
6154 rn = lookup_stmt_eh_region (exp);
6155 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6156 if (rn >= 0)
6157 last = get_last_insn ();
6158 }
6159
6160 /* If this is an expression of some kind and it has an associated line
6161 number, then emit the line number before expanding the expression.
6162
6163 We need to save and restore the file and line information so that
6164 errors discovered during expansion are emitted with the right
6165 information. It would be better of the diagnostic routines
6166 used the file/line information embedded in the tree nodes rather
6167 than globals. */
6168 if (cfun && EXPR_HAS_LOCATION (exp))
6169 {
6170 location_t saved_location = input_location;
6171 input_location = EXPR_LOCATION (exp);
6172 emit_line_note (input_location);
6173
6174 /* Record where the insns produced belong. */
6175 record_block_change (TREE_BLOCK (exp));
6176
6177 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6178
6179 input_location = saved_location;
6180 }
6181 else
6182 {
6183 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6184 }
6185
6186 /* If using non-call exceptions, mark all insns that may trap.
6187 expand_call() will mark CALL_INSNs before we get to this code,
6188 but it doesn't handle libcalls, and these may trap. */
6189 if (rn >= 0)
6190 {
6191 rtx insn;
6192 for (insn = next_real_insn (last); insn;
6193 insn = next_real_insn (insn))
6194 {
6195 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6196 /* If we want exceptions for non-call insns, any
6197 may_trap_p instruction may throw. */
6198 && GET_CODE (PATTERN (insn)) != CLOBBER
6199 && GET_CODE (PATTERN (insn)) != USE
6200 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6201 {
6202 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6203 REG_NOTES (insn));
6204 }
6205 }
6206 }
6207
6208 return ret;
6209 }
6210
6211 static rtx
6212 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6213 enum expand_modifier modifier, rtx *alt_rtl)
6214 {
6215 rtx op0, op1, temp;
6216 tree type = TREE_TYPE (exp);
6217 int unsignedp;
6218 enum machine_mode mode;
6219 enum tree_code code = TREE_CODE (exp);
6220 optab this_optab;
6221 rtx subtarget, original_target;
6222 int ignore;
6223 tree context;
6224 bool reduce_bit_field = false;
6225 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6226 ? reduce_to_bit_field_precision ((expr), \
6227 target, \
6228 type) \
6229 : (expr))
6230
6231 mode = TYPE_MODE (type);
6232 unsignedp = TYPE_UNSIGNED (type);
6233 if (lang_hooks.reduce_bit_field_operations
6234 && TREE_CODE (type) == INTEGER_TYPE
6235 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6236 {
6237 /* An operation in what may be a bit-field type needs the
6238 result to be reduced to the precision of the bit-field type,
6239 which is narrower than that of the type's mode. */
6240 reduce_bit_field = true;
6241 if (modifier == EXPAND_STACK_PARM)
6242 target = 0;
6243 }
6244
6245 /* Use subtarget as the target for operand 0 of a binary operation. */
6246 subtarget = get_subtarget (target);
6247 original_target = target;
6248 ignore = (target == const0_rtx
6249 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6250 || code == CONVERT_EXPR || code == COND_EXPR
6251 || code == VIEW_CONVERT_EXPR)
6252 && TREE_CODE (type) == VOID_TYPE));
6253
6254 /* If we are going to ignore this result, we need only do something
6255 if there is a side-effect somewhere in the expression. If there
6256 is, short-circuit the most common cases here. Note that we must
6257 not call expand_expr with anything but const0_rtx in case this
6258 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6259
6260 if (ignore)
6261 {
6262 if (! TREE_SIDE_EFFECTS (exp))
6263 return const0_rtx;
6264
6265 /* Ensure we reference a volatile object even if value is ignored, but
6266 don't do this if all we are doing is taking its address. */
6267 if (TREE_THIS_VOLATILE (exp)
6268 && TREE_CODE (exp) != FUNCTION_DECL
6269 && mode != VOIDmode && mode != BLKmode
6270 && modifier != EXPAND_CONST_ADDRESS)
6271 {
6272 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6273 if (MEM_P (temp))
6274 temp = copy_to_reg (temp);
6275 return const0_rtx;
6276 }
6277
6278 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6279 || code == INDIRECT_REF)
6280 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6281 modifier);
6282
6283 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6284 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6285 {
6286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6287 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6288 return const0_rtx;
6289 }
6290 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6291 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6292 /* If the second operand has no side effects, just evaluate
6293 the first. */
6294 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6295 modifier);
6296 else if (code == BIT_FIELD_REF)
6297 {
6298 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6299 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6300 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6301 return const0_rtx;
6302 }
6303
6304 target = 0;
6305 }
6306
6307 /* If will do cse, generate all results into pseudo registers
6308 since 1) that allows cse to find more things
6309 and 2) otherwise cse could produce an insn the machine
6310 cannot support. An exception is a CONSTRUCTOR into a multi-word
6311 MEM: that's much more likely to be most efficient into the MEM.
6312 Another is a CALL_EXPR which must return in memory. */
6313
6314 if (! cse_not_expected && mode != BLKmode && target
6315 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6316 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6317 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6318 target = 0;
6319
6320 switch (code)
6321 {
6322 case LABEL_DECL:
6323 {
6324 tree function = decl_function_context (exp);
6325
6326 temp = label_rtx (exp);
6327 temp = gen_rtx_LABEL_REF (Pmode, temp);
6328
6329 if (function != current_function_decl
6330 && function != 0)
6331 LABEL_REF_NONLOCAL_P (temp) = 1;
6332
6333 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6334 return temp;
6335 }
6336
6337 case PARM_DECL:
6338 if (!DECL_RTL_SET_P (exp))
6339 {
6340 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6341 return CONST0_RTX (mode);
6342 }
6343
6344 /* ... fall through ... */
6345
6346 case VAR_DECL:
6347 /* If a static var's type was incomplete when the decl was written,
6348 but the type is complete now, lay out the decl now. */
6349 if (DECL_SIZE (exp) == 0
6350 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6351 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6352 layout_decl (exp, 0);
6353
6354 /* ... fall through ... */
6355
6356 case FUNCTION_DECL:
6357 case RESULT_DECL:
6358 if (DECL_RTL (exp) == 0)
6359 abort ();
6360
6361 /* Ensure variable marked as used even if it doesn't go through
6362 a parser. If it hasn't be used yet, write out an external
6363 definition. */
6364 if (! TREE_USED (exp))
6365 {
6366 assemble_external (exp);
6367 TREE_USED (exp) = 1;
6368 }
6369
6370 /* Show we haven't gotten RTL for this yet. */
6371 temp = 0;
6372
6373 /* Variables inherited from containing functions should have
6374 been lowered by this point. */
6375 context = decl_function_context (exp);
6376 if (context != 0
6377 && context != current_function_decl
6378 && !TREE_STATIC (exp)
6379 /* ??? C++ creates functions that are not TREE_STATIC. */
6380 && TREE_CODE (exp) != FUNCTION_DECL)
6381 abort ();
6382
6383 /* This is the case of an array whose size is to be determined
6384 from its initializer, while the initializer is still being parsed.
6385 See expand_decl. */
6386
6387 else if (MEM_P (DECL_RTL (exp))
6388 && REG_P (XEXP (DECL_RTL (exp), 0)))
6389 temp = validize_mem (DECL_RTL (exp));
6390
6391 /* If DECL_RTL is memory, we are in the normal case and either
6392 the address is not valid or it is not a register and -fforce-addr
6393 is specified, get the address into a register. */
6394
6395 else if (MEM_P (DECL_RTL (exp))
6396 && modifier != EXPAND_CONST_ADDRESS
6397 && modifier != EXPAND_SUM
6398 && modifier != EXPAND_INITIALIZER
6399 && (! memory_address_p (DECL_MODE (exp),
6400 XEXP (DECL_RTL (exp), 0))
6401 || (flag_force_addr
6402 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6403 {
6404 if (alt_rtl)
6405 *alt_rtl = DECL_RTL (exp);
6406 temp = replace_equiv_address (DECL_RTL (exp),
6407 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6408 }
6409
6410 /* If we got something, return it. But first, set the alignment
6411 if the address is a register. */
6412 if (temp != 0)
6413 {
6414 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6415 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6416
6417 return temp;
6418 }
6419
6420 /* If the mode of DECL_RTL does not match that of the decl, it
6421 must be a promoted value. We return a SUBREG of the wanted mode,
6422 but mark it so that we know that it was already extended. */
6423
6424 if (REG_P (DECL_RTL (exp))
6425 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6426 {
6427 /* Get the signedness used for this variable. Ensure we get the
6428 same mode we got when the variable was declared. */
6429 if (GET_MODE (DECL_RTL (exp))
6430 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6431 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6432 abort ();
6433
6434 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6435 SUBREG_PROMOTED_VAR_P (temp) = 1;
6436 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6437 return temp;
6438 }
6439
6440 return DECL_RTL (exp);
6441
6442 case INTEGER_CST:
6443 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6444 TREE_INT_CST_HIGH (exp), mode);
6445
6446 /* ??? If overflow is set, fold will have done an incomplete job,
6447 which can result in (plus xx (const_int 0)), which can get
6448 simplified by validate_replace_rtx during virtual register
6449 instantiation, which can result in unrecognizable insns.
6450 Avoid this by forcing all overflows into registers. */
6451 if (TREE_CONSTANT_OVERFLOW (exp)
6452 && modifier != EXPAND_INITIALIZER)
6453 temp = force_reg (mode, temp);
6454
6455 return temp;
6456
6457 case VECTOR_CST:
6458 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6459 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6460 return const_vector_from_tree (exp);
6461 else
6462 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6463 TREE_VECTOR_CST_ELTS (exp)),
6464 ignore ? const0_rtx : target, tmode, modifier);
6465
6466 case CONST_DECL:
6467 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6468
6469 case REAL_CST:
6470 /* If optimized, generate immediate CONST_DOUBLE
6471 which will be turned into memory by reload if necessary.
6472
6473 We used to force a register so that loop.c could see it. But
6474 this does not allow gen_* patterns to perform optimizations with
6475 the constants. It also produces two insns in cases like "x = 1.0;".
6476 On most machines, floating-point constants are not permitted in
6477 many insns, so we'd end up copying it to a register in any case.
6478
6479 Now, we do the copying in expand_binop, if appropriate. */
6480 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6481 TYPE_MODE (TREE_TYPE (exp)));
6482
6483 case COMPLEX_CST:
6484 /* Handle evaluating a complex constant in a CONCAT target. */
6485 if (original_target && GET_CODE (original_target) == CONCAT)
6486 {
6487 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6488 rtx rtarg, itarg;
6489
6490 rtarg = XEXP (original_target, 0);
6491 itarg = XEXP (original_target, 1);
6492
6493 /* Move the real and imaginary parts separately. */
6494 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6495 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6496
6497 if (op0 != rtarg)
6498 emit_move_insn (rtarg, op0);
6499 if (op1 != itarg)
6500 emit_move_insn (itarg, op1);
6501
6502 return original_target;
6503 }
6504
6505 /* ... fall through ... */
6506
6507 case STRING_CST:
6508 temp = output_constant_def (exp, 1);
6509
6510 /* temp contains a constant address.
6511 On RISC machines where a constant address isn't valid,
6512 make some insns to get that address into a register. */
6513 if (modifier != EXPAND_CONST_ADDRESS
6514 && modifier != EXPAND_INITIALIZER
6515 && modifier != EXPAND_SUM
6516 && (! memory_address_p (mode, XEXP (temp, 0))
6517 || flag_force_addr))
6518 return replace_equiv_address (temp,
6519 copy_rtx (XEXP (temp, 0)));
6520 return temp;
6521
6522 case SAVE_EXPR:
6523 {
6524 tree val = TREE_OPERAND (exp, 0);
6525 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6526
6527 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6528 {
6529 /* We can indeed still hit this case, typically via builtin
6530 expanders calling save_expr immediately before expanding
6531 something. Assume this means that we only have to deal
6532 with non-BLKmode values. */
6533 if (GET_MODE (ret) == BLKmode)
6534 abort ();
6535
6536 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6537 DECL_ARTIFICIAL (val) = 1;
6538 TREE_OPERAND (exp, 0) = val;
6539
6540 if (!CONSTANT_P (ret))
6541 ret = copy_to_reg (ret);
6542 SET_DECL_RTL (val, ret);
6543 }
6544
6545 return ret;
6546 }
6547
6548 case UNSAVE_EXPR:
6549 {
6550 rtx temp;
6551 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6552 TREE_OPERAND (exp, 0)
6553 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6554 return temp;
6555 }
6556
6557 case GOTO_EXPR:
6558 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6559 expand_goto (TREE_OPERAND (exp, 0));
6560 else
6561 expand_computed_goto (TREE_OPERAND (exp, 0));
6562 return const0_rtx;
6563
6564 /* These are lowered during gimplification, so we should never ever
6565 see them here. */
6566 case LOOP_EXPR:
6567 case EXIT_EXPR:
6568 abort ();
6569
6570 case LABELED_BLOCK_EXPR:
6571 if (LABELED_BLOCK_BODY (exp))
6572 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6573 /* Should perhaps use expand_label, but this is simpler and safer. */
6574 do_pending_stack_adjust ();
6575 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6576 return const0_rtx;
6577
6578 case EXIT_BLOCK_EXPR:
6579 if (EXIT_BLOCK_RETURN (exp))
6580 sorry ("returned value in block_exit_expr");
6581 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6582 return const0_rtx;
6583
6584 case CONSTRUCTOR:
6585 /* If we don't need the result, just ensure we evaluate any
6586 subexpressions. */
6587 if (ignore)
6588 {
6589 tree elt;
6590
6591 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6592 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6593
6594 return const0_rtx;
6595 }
6596
6597 /* All elts simple constants => refer to a constant in memory. But
6598 if this is a non-BLKmode mode, let it store a field at a time
6599 since that should make a CONST_INT or CONST_DOUBLE when we
6600 fold. Likewise, if we have a target we can use, it is best to
6601 store directly into the target unless the type is large enough
6602 that memcpy will be used. If we are making an initializer and
6603 all operands are constant, put it in memory as well.
6604
6605 FIXME: Avoid trying to fill vector constructors piece-meal.
6606 Output them with output_constant_def below unless we're sure
6607 they're zeros. This should go away when vector initializers
6608 are treated like VECTOR_CST instead of arrays.
6609 */
6610 else if ((TREE_STATIC (exp)
6611 && ((mode == BLKmode
6612 && ! (target != 0 && safe_from_p (target, exp, 1)))
6613 || TREE_ADDRESSABLE (exp)
6614 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6615 && (! MOVE_BY_PIECES_P
6616 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6617 TYPE_ALIGN (type)))
6618 && ! mostly_zeros_p (exp))))
6619 || ((modifier == EXPAND_INITIALIZER
6620 || modifier == EXPAND_CONST_ADDRESS)
6621 && TREE_CONSTANT (exp)))
6622 {
6623 rtx constructor = output_constant_def (exp, 1);
6624
6625 if (modifier != EXPAND_CONST_ADDRESS
6626 && modifier != EXPAND_INITIALIZER
6627 && modifier != EXPAND_SUM)
6628 constructor = validize_mem (constructor);
6629
6630 return constructor;
6631 }
6632 else
6633 {
6634 /* Handle calls that pass values in multiple non-contiguous
6635 locations. The Irix 6 ABI has examples of this. */
6636 if (target == 0 || ! safe_from_p (target, exp, 1)
6637 || GET_CODE (target) == PARALLEL
6638 || modifier == EXPAND_STACK_PARM)
6639 target
6640 = assign_temp (build_qualified_type (type,
6641 (TYPE_QUALS (type)
6642 | (TREE_READONLY (exp)
6643 * TYPE_QUAL_CONST))),
6644 0, TREE_ADDRESSABLE (exp), 1);
6645
6646 store_constructor (exp, target, 0, int_expr_size (exp));
6647 return target;
6648 }
6649
6650 case INDIRECT_REF:
6651 {
6652 tree exp1 = TREE_OPERAND (exp, 0);
6653
6654 if (modifier != EXPAND_WRITE)
6655 {
6656 tree t;
6657
6658 t = fold_read_from_constant_string (exp);
6659 if (t)
6660 return expand_expr (t, target, tmode, modifier);
6661 }
6662
6663 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6664 op0 = memory_address (mode, op0);
6665 temp = gen_rtx_MEM (mode, op0);
6666 set_mem_attributes (temp, exp, 0);
6667
6668 /* If we are writing to this object and its type is a record with
6669 readonly fields, we must mark it as readonly so it will
6670 conflict with readonly references to those fields. */
6671 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6672 RTX_UNCHANGING_P (temp) = 1;
6673
6674 return temp;
6675 }
6676
6677 case ARRAY_REF:
6678
6679 #ifdef ENABLE_CHECKING
6680 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6681 abort ();
6682 #endif
6683
6684 {
6685 tree array = TREE_OPERAND (exp, 0);
6686 tree low_bound = array_ref_low_bound (exp);
6687 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6688 HOST_WIDE_INT i;
6689
6690 /* Optimize the special-case of a zero lower bound.
6691
6692 We convert the low_bound to sizetype to avoid some problems
6693 with constant folding. (E.g. suppose the lower bound is 1,
6694 and its mode is QI. Without the conversion, (ARRAY
6695 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6696 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6697
6698 if (! integer_zerop (low_bound))
6699 index = size_diffop (index, convert (sizetype, low_bound));
6700
6701 /* Fold an expression like: "foo"[2].
6702 This is not done in fold so it won't happen inside &.
6703 Don't fold if this is for wide characters since it's too
6704 difficult to do correctly and this is a very rare case. */
6705
6706 if (modifier != EXPAND_CONST_ADDRESS
6707 && modifier != EXPAND_INITIALIZER
6708 && modifier != EXPAND_MEMORY)
6709 {
6710 tree t = fold_read_from_constant_string (exp);
6711
6712 if (t)
6713 return expand_expr (t, target, tmode, modifier);
6714 }
6715
6716 /* If this is a constant index into a constant array,
6717 just get the value from the array. Handle both the cases when
6718 we have an explicit constructor and when our operand is a variable
6719 that was declared const. */
6720
6721 if (modifier != EXPAND_CONST_ADDRESS
6722 && modifier != EXPAND_INITIALIZER
6723 && modifier != EXPAND_MEMORY
6724 && TREE_CODE (array) == CONSTRUCTOR
6725 && ! TREE_SIDE_EFFECTS (array)
6726 && TREE_CODE (index) == INTEGER_CST
6727 && 0 > compare_tree_int (index,
6728 list_length (CONSTRUCTOR_ELTS
6729 (TREE_OPERAND (exp, 0)))))
6730 {
6731 tree elem;
6732
6733 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6734 i = TREE_INT_CST_LOW (index);
6735 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6736 ;
6737
6738 if (elem)
6739 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6740 modifier);
6741 }
6742
6743 else if (optimize >= 1
6744 && modifier != EXPAND_CONST_ADDRESS
6745 && modifier != EXPAND_INITIALIZER
6746 && modifier != EXPAND_MEMORY
6747 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6748 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6749 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6750 && targetm.binds_local_p (array))
6751 {
6752 if (TREE_CODE (index) == INTEGER_CST)
6753 {
6754 tree init = DECL_INITIAL (array);
6755
6756 if (TREE_CODE (init) == CONSTRUCTOR)
6757 {
6758 tree elem;
6759
6760 for (elem = CONSTRUCTOR_ELTS (init);
6761 (elem
6762 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6763 elem = TREE_CHAIN (elem))
6764 ;
6765
6766 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6767 return expand_expr (fold (TREE_VALUE (elem)), target,
6768 tmode, modifier);
6769 }
6770 else if (TREE_CODE (init) == STRING_CST
6771 && 0 > compare_tree_int (index,
6772 TREE_STRING_LENGTH (init)))
6773 {
6774 tree type = TREE_TYPE (TREE_TYPE (init));
6775 enum machine_mode mode = TYPE_MODE (type);
6776
6777 if (GET_MODE_CLASS (mode) == MODE_INT
6778 && GET_MODE_SIZE (mode) == 1)
6779 return gen_int_mode (TREE_STRING_POINTER (init)
6780 [TREE_INT_CST_LOW (index)], mode);
6781 }
6782 }
6783 }
6784 }
6785 goto normal_inner_ref;
6786
6787 case COMPONENT_REF:
6788 /* If the operand is a CONSTRUCTOR, we can just extract the
6789 appropriate field if it is present. */
6790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6791 {
6792 tree elt;
6793
6794 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6795 elt = TREE_CHAIN (elt))
6796 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6797 /* We can normally use the value of the field in the
6798 CONSTRUCTOR. However, if this is a bitfield in
6799 an integral mode that we can fit in a HOST_WIDE_INT,
6800 we must mask only the number of bits in the bitfield,
6801 since this is done implicitly by the constructor. If
6802 the bitfield does not meet either of those conditions,
6803 we can't do this optimization. */
6804 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6805 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6806 == MODE_INT)
6807 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6808 <= HOST_BITS_PER_WIDE_INT))))
6809 {
6810 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6811 && modifier == EXPAND_STACK_PARM)
6812 target = 0;
6813 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6814 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6815 {
6816 HOST_WIDE_INT bitsize
6817 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6818 enum machine_mode imode
6819 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6820
6821 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6822 {
6823 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6824 op0 = expand_and (imode, op0, op1, target);
6825 }
6826 else
6827 {
6828 tree count
6829 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6830 0);
6831
6832 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6833 target, 0);
6834 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6835 target, 0);
6836 }
6837 }
6838
6839 return op0;
6840 }
6841 }
6842 goto normal_inner_ref;
6843
6844 case BIT_FIELD_REF:
6845 case ARRAY_RANGE_REF:
6846 normal_inner_ref:
6847 {
6848 enum machine_mode mode1;
6849 HOST_WIDE_INT bitsize, bitpos;
6850 tree offset;
6851 int volatilep = 0;
6852 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6853 &mode1, &unsignedp, &volatilep);
6854 rtx orig_op0;
6855
6856 /* If we got back the original object, something is wrong. Perhaps
6857 we are evaluating an expression too early. In any event, don't
6858 infinitely recurse. */
6859 if (tem == exp)
6860 abort ();
6861
6862 /* If TEM's type is a union of variable size, pass TARGET to the inner
6863 computation, since it will need a temporary and TARGET is known
6864 to have to do. This occurs in unchecked conversion in Ada. */
6865
6866 orig_op0 = op0
6867 = expand_expr (tem,
6868 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6869 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6870 != INTEGER_CST)
6871 && modifier != EXPAND_STACK_PARM
6872 ? target : NULL_RTX),
6873 VOIDmode,
6874 (modifier == EXPAND_INITIALIZER
6875 || modifier == EXPAND_CONST_ADDRESS
6876 || modifier == EXPAND_STACK_PARM)
6877 ? modifier : EXPAND_NORMAL);
6878
6879 /* If this is a constant, put it into a register if it is a
6880 legitimate constant and OFFSET is 0 and memory if it isn't. */
6881 if (CONSTANT_P (op0))
6882 {
6883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6884 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6885 && offset == 0)
6886 op0 = force_reg (mode, op0);
6887 else
6888 op0 = validize_mem (force_const_mem (mode, op0));
6889 }
6890
6891 /* Otherwise, if this object not in memory and we either have an
6892 offset or a BLKmode result, put it there. This case can't occur in
6893 C, but can in Ada if we have unchecked conversion of an expression
6894 from a scalar type to an array or record type or for an
6895 ARRAY_RANGE_REF whose type is BLKmode. */
6896 else if (!MEM_P (op0)
6897 && (offset != 0
6898 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6899 {
6900 tree nt = build_qualified_type (TREE_TYPE (tem),
6901 (TYPE_QUALS (TREE_TYPE (tem))
6902 | TYPE_QUAL_CONST));
6903 rtx memloc = assign_temp (nt, 1, 1, 1);
6904
6905 emit_move_insn (memloc, op0);
6906 op0 = memloc;
6907 }
6908
6909 if (offset != 0)
6910 {
6911 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6912 EXPAND_SUM);
6913
6914 if (!MEM_P (op0))
6915 abort ();
6916
6917 #ifdef POINTERS_EXTEND_UNSIGNED
6918 if (GET_MODE (offset_rtx) != Pmode)
6919 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6920 #else
6921 if (GET_MODE (offset_rtx) != ptr_mode)
6922 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6923 #endif
6924
6925 if (GET_MODE (op0) == BLKmode
6926 /* A constant address in OP0 can have VOIDmode, we must
6927 not try to call force_reg in that case. */
6928 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6929 && bitsize != 0
6930 && (bitpos % bitsize) == 0
6931 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6932 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6933 {
6934 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6935 bitpos = 0;
6936 }
6937
6938 op0 = offset_address (op0, offset_rtx,
6939 highest_pow2_factor (offset));
6940 }
6941
6942 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6943 record its alignment as BIGGEST_ALIGNMENT. */
6944 if (MEM_P (op0) && bitpos == 0 && offset != 0
6945 && is_aligning_offset (offset, tem))
6946 set_mem_align (op0, BIGGEST_ALIGNMENT);
6947
6948 /* Don't forget about volatility even if this is a bitfield. */
6949 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6950 {
6951 if (op0 == orig_op0)
6952 op0 = copy_rtx (op0);
6953
6954 MEM_VOLATILE_P (op0) = 1;
6955 }
6956
6957 /* The following code doesn't handle CONCAT.
6958 Assume only bitpos == 0 can be used for CONCAT, due to
6959 one element arrays having the same mode as its element. */
6960 if (GET_CODE (op0) == CONCAT)
6961 {
6962 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6963 abort ();
6964 return op0;
6965 }
6966
6967 /* In cases where an aligned union has an unaligned object
6968 as a field, we might be extracting a BLKmode value from
6969 an integer-mode (e.g., SImode) object. Handle this case
6970 by doing the extract into an object as wide as the field
6971 (which we know to be the width of a basic mode), then
6972 storing into memory, and changing the mode to BLKmode. */
6973 if (mode1 == VOIDmode
6974 || REG_P (op0) || GET_CODE (op0) == SUBREG
6975 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6976 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6977 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6978 && modifier != EXPAND_CONST_ADDRESS
6979 && modifier != EXPAND_INITIALIZER)
6980 /* If the field isn't aligned enough to fetch as a memref,
6981 fetch it as a bit field. */
6982 || (mode1 != BLKmode
6983 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6984 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6985 || (MEM_P (op0)
6986 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6987 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6988 && ((modifier == EXPAND_CONST_ADDRESS
6989 || modifier == EXPAND_INITIALIZER)
6990 ? STRICT_ALIGNMENT
6991 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6992 || (bitpos % BITS_PER_UNIT != 0)))
6993 /* If the type and the field are a constant size and the
6994 size of the type isn't the same size as the bitfield,
6995 we must use bitfield operations. */
6996 || (bitsize >= 0
6997 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6998 == INTEGER_CST)
6999 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7000 bitsize)))
7001 {
7002 enum machine_mode ext_mode = mode;
7003
7004 if (ext_mode == BLKmode
7005 && ! (target != 0 && MEM_P (op0)
7006 && MEM_P (target)
7007 && bitpos % BITS_PER_UNIT == 0))
7008 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7009
7010 if (ext_mode == BLKmode)
7011 {
7012 if (target == 0)
7013 target = assign_temp (type, 0, 1, 1);
7014
7015 if (bitsize == 0)
7016 return target;
7017
7018 /* In this case, BITPOS must start at a byte boundary and
7019 TARGET, if specified, must be a MEM. */
7020 if (!MEM_P (op0)
7021 || (target != 0 && !MEM_P (target))
7022 || bitpos % BITS_PER_UNIT != 0)
7023 abort ();
7024
7025 emit_block_move (target,
7026 adjust_address (op0, VOIDmode,
7027 bitpos / BITS_PER_UNIT),
7028 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7029 / BITS_PER_UNIT),
7030 (modifier == EXPAND_STACK_PARM
7031 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7032
7033 return target;
7034 }
7035
7036 op0 = validize_mem (op0);
7037
7038 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7039 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7040
7041 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7042 (modifier == EXPAND_STACK_PARM
7043 ? NULL_RTX : target),
7044 ext_mode, ext_mode);
7045
7046 /* If the result is a record type and BITSIZE is narrower than
7047 the mode of OP0, an integral mode, and this is a big endian
7048 machine, we must put the field into the high-order bits. */
7049 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7050 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7051 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7052 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7053 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7054 - bitsize),
7055 op0, 1);
7056
7057 /* If the result type is BLKmode, store the data into a temporary
7058 of the appropriate type, but with the mode corresponding to the
7059 mode for the data we have (op0's mode). It's tempting to make
7060 this a constant type, since we know it's only being stored once,
7061 but that can cause problems if we are taking the address of this
7062 COMPONENT_REF because the MEM of any reference via that address
7063 will have flags corresponding to the type, which will not
7064 necessarily be constant. */
7065 if (mode == BLKmode)
7066 {
7067 rtx new
7068 = assign_stack_temp_for_type
7069 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7070
7071 emit_move_insn (new, op0);
7072 op0 = copy_rtx (new);
7073 PUT_MODE (op0, BLKmode);
7074 set_mem_attributes (op0, exp, 1);
7075 }
7076
7077 return op0;
7078 }
7079
7080 /* If the result is BLKmode, use that to access the object
7081 now as well. */
7082 if (mode == BLKmode)
7083 mode1 = BLKmode;
7084
7085 /* Get a reference to just this component. */
7086 if (modifier == EXPAND_CONST_ADDRESS
7087 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7088 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7089 else
7090 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7091
7092 if (op0 == orig_op0)
7093 op0 = copy_rtx (op0);
7094
7095 set_mem_attributes (op0, exp, 0);
7096 if (REG_P (XEXP (op0, 0)))
7097 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7098
7099 MEM_VOLATILE_P (op0) |= volatilep;
7100 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7101 || modifier == EXPAND_CONST_ADDRESS
7102 || modifier == EXPAND_INITIALIZER)
7103 return op0;
7104 else if (target == 0)
7105 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7106
7107 convert_move (target, op0, unsignedp);
7108 return target;
7109 }
7110
7111 case OBJ_TYPE_REF:
7112 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7113
7114 case CALL_EXPR:
7115 /* Check for a built-in function. */
7116 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7117 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7118 == FUNCTION_DECL)
7119 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7120 {
7121 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7122 == BUILT_IN_FRONTEND)
7123 return lang_hooks.expand_expr (exp, original_target,
7124 tmode, modifier,
7125 alt_rtl);
7126 else
7127 return expand_builtin (exp, target, subtarget, tmode, ignore);
7128 }
7129
7130 return expand_call (exp, target, ignore);
7131
7132 case NON_LVALUE_EXPR:
7133 case NOP_EXPR:
7134 case CONVERT_EXPR:
7135 if (TREE_OPERAND (exp, 0) == error_mark_node)
7136 return const0_rtx;
7137
7138 if (TREE_CODE (type) == UNION_TYPE)
7139 {
7140 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7141
7142 /* If both input and output are BLKmode, this conversion isn't doing
7143 anything except possibly changing memory attribute. */
7144 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7145 {
7146 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7147 modifier);
7148
7149 result = copy_rtx (result);
7150 set_mem_attributes (result, exp, 0);
7151 return result;
7152 }
7153
7154 if (target == 0)
7155 {
7156 if (TYPE_MODE (type) != BLKmode)
7157 target = gen_reg_rtx (TYPE_MODE (type));
7158 else
7159 target = assign_temp (type, 0, 1, 1);
7160 }
7161
7162 if (MEM_P (target))
7163 /* Store data into beginning of memory target. */
7164 store_expr (TREE_OPERAND (exp, 0),
7165 adjust_address (target, TYPE_MODE (valtype), 0),
7166 modifier == EXPAND_STACK_PARM ? 2 : 0);
7167
7168 else if (REG_P (target))
7169 /* Store this field into a union of the proper type. */
7170 store_field (target,
7171 MIN ((int_size_in_bytes (TREE_TYPE
7172 (TREE_OPERAND (exp, 0)))
7173 * BITS_PER_UNIT),
7174 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7175 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7176 VOIDmode, 0, type, 0);
7177 else
7178 abort ();
7179
7180 /* Return the entire union. */
7181 return target;
7182 }
7183
7184 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7185 {
7186 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7187 modifier);
7188
7189 /* If the signedness of the conversion differs and OP0 is
7190 a promoted SUBREG, clear that indication since we now
7191 have to do the proper extension. */
7192 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7193 && GET_CODE (op0) == SUBREG)
7194 SUBREG_PROMOTED_VAR_P (op0) = 0;
7195
7196 return REDUCE_BIT_FIELD (op0);
7197 }
7198
7199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7200 op0 = REDUCE_BIT_FIELD (op0);
7201 if (GET_MODE (op0) == mode)
7202 return op0;
7203
7204 /* If OP0 is a constant, just convert it into the proper mode. */
7205 if (CONSTANT_P (op0))
7206 {
7207 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7208 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7209
7210 if (modifier == EXPAND_INITIALIZER)
7211 return simplify_gen_subreg (mode, op0, inner_mode,
7212 subreg_lowpart_offset (mode,
7213 inner_mode));
7214 else
7215 return convert_modes (mode, inner_mode, op0,
7216 TYPE_UNSIGNED (inner_type));
7217 }
7218
7219 if (modifier == EXPAND_INITIALIZER)
7220 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7221
7222 if (target == 0)
7223 return
7224 convert_to_mode (mode, op0,
7225 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7226 else
7227 convert_move (target, op0,
7228 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7229 return target;
7230
7231 case VIEW_CONVERT_EXPR:
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7233
7234 /* If the input and output modes are both the same, we are done.
7235 Otherwise, if neither mode is BLKmode and both are integral and within
7236 a word, we can use gen_lowpart. If neither is true, make sure the
7237 operand is in memory and convert the MEM to the new mode. */
7238 if (TYPE_MODE (type) == GET_MODE (op0))
7239 ;
7240 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7241 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7242 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7243 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7244 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7245 op0 = gen_lowpart (TYPE_MODE (type), op0);
7246 else if (!MEM_P (op0))
7247 {
7248 /* If the operand is not a MEM, force it into memory. Since we
7249 are going to be be changing the mode of the MEM, don't call
7250 force_const_mem for constants because we don't allow pool
7251 constants to change mode. */
7252 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7253
7254 if (TREE_ADDRESSABLE (exp))
7255 abort ();
7256
7257 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7258 target
7259 = assign_stack_temp_for_type
7260 (TYPE_MODE (inner_type),
7261 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7262
7263 emit_move_insn (target, op0);
7264 op0 = target;
7265 }
7266
7267 /* At this point, OP0 is in the correct mode. If the output type is such
7268 that the operand is known to be aligned, indicate that it is.
7269 Otherwise, we need only be concerned about alignment for non-BLKmode
7270 results. */
7271 if (MEM_P (op0))
7272 {
7273 op0 = copy_rtx (op0);
7274
7275 if (TYPE_ALIGN_OK (type))
7276 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7277 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7278 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7279 {
7280 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7281 HOST_WIDE_INT temp_size
7282 = MAX (int_size_in_bytes (inner_type),
7283 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7284 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7285 temp_size, 0, type);
7286 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7287
7288 if (TREE_ADDRESSABLE (exp))
7289 abort ();
7290
7291 if (GET_MODE (op0) == BLKmode)
7292 emit_block_move (new_with_op0_mode, op0,
7293 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7294 (modifier == EXPAND_STACK_PARM
7295 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7296 else
7297 emit_move_insn (new_with_op0_mode, op0);
7298
7299 op0 = new;
7300 }
7301
7302 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7303 }
7304
7305 return op0;
7306
7307 case PLUS_EXPR:
7308 this_optab = ! unsignedp && flag_trapv
7309 && (GET_MODE_CLASS (mode) == MODE_INT)
7310 ? addv_optab : add_optab;
7311
7312 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7313 something else, make sure we add the register to the constant and
7314 then to the other thing. This case can occur during strength
7315 reduction and doing it this way will produce better code if the
7316 frame pointer or argument pointer is eliminated.
7317
7318 fold-const.c will ensure that the constant is always in the inner
7319 PLUS_EXPR, so the only case we need to do anything about is if
7320 sp, ap, or fp is our second argument, in which case we must swap
7321 the innermost first argument and our second argument. */
7322
7323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7324 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7325 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7326 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7327 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7328 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7329 {
7330 tree t = TREE_OPERAND (exp, 1);
7331
7332 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7333 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7334 }
7335
7336 /* If the result is to be ptr_mode and we are adding an integer to
7337 something, we might be forming a constant. So try to use
7338 plus_constant. If it produces a sum and we can't accept it,
7339 use force_operand. This allows P = &ARR[const] to generate
7340 efficient code on machines where a SYMBOL_REF is not a valid
7341 address.
7342
7343 If this is an EXPAND_SUM call, always return the sum. */
7344 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7345 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7346 {
7347 if (modifier == EXPAND_STACK_PARM)
7348 target = 0;
7349 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7350 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7351 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7352 {
7353 rtx constant_part;
7354
7355 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7356 EXPAND_SUM);
7357 /* Use immed_double_const to ensure that the constant is
7358 truncated according to the mode of OP1, then sign extended
7359 to a HOST_WIDE_INT. Using the constant directly can result
7360 in non-canonical RTL in a 64x32 cross compile. */
7361 constant_part
7362 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7363 (HOST_WIDE_INT) 0,
7364 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7365 op1 = plus_constant (op1, INTVAL (constant_part));
7366 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7367 op1 = force_operand (op1, target);
7368 return REDUCE_BIT_FIELD (op1);
7369 }
7370
7371 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7372 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7373 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7374 {
7375 rtx constant_part;
7376
7377 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7378 (modifier == EXPAND_INITIALIZER
7379 ? EXPAND_INITIALIZER : EXPAND_SUM));
7380 if (! CONSTANT_P (op0))
7381 {
7382 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7383 VOIDmode, modifier);
7384 /* Return a PLUS if modifier says it's OK. */
7385 if (modifier == EXPAND_SUM
7386 || modifier == EXPAND_INITIALIZER)
7387 return simplify_gen_binary (PLUS, mode, op0, op1);
7388 goto binop2;
7389 }
7390 /* Use immed_double_const to ensure that the constant is
7391 truncated according to the mode of OP1, then sign extended
7392 to a HOST_WIDE_INT. Using the constant directly can result
7393 in non-canonical RTL in a 64x32 cross compile. */
7394 constant_part
7395 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7396 (HOST_WIDE_INT) 0,
7397 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7398 op0 = plus_constant (op0, INTVAL (constant_part));
7399 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7400 op0 = force_operand (op0, target);
7401 return REDUCE_BIT_FIELD (op0);
7402 }
7403 }
7404
7405 /* No sense saving up arithmetic to be done
7406 if it's all in the wrong mode to form part of an address.
7407 And force_operand won't know whether to sign-extend or
7408 zero-extend. */
7409 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7410 || mode != ptr_mode)
7411 {
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 subtarget, &op0, &op1, 0);
7414 if (op0 == const0_rtx)
7415 return op1;
7416 if (op1 == const0_rtx)
7417 return op0;
7418 goto binop2;
7419 }
7420
7421 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7422 subtarget, &op0, &op1, modifier);
7423 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7424
7425 case MINUS_EXPR:
7426 /* For initializers, we are allowed to return a MINUS of two
7427 symbolic constants. Here we handle all cases when both operands
7428 are constant. */
7429 /* Handle difference of two symbolic constants,
7430 for the sake of an initializer. */
7431 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7432 && really_constant_p (TREE_OPERAND (exp, 0))
7433 && really_constant_p (TREE_OPERAND (exp, 1)))
7434 {
7435 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7436 NULL_RTX, &op0, &op1, modifier);
7437
7438 /* If the last operand is a CONST_INT, use plus_constant of
7439 the negated constant. Else make the MINUS. */
7440 if (GET_CODE (op1) == CONST_INT)
7441 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7442 else
7443 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7444 }
7445
7446 this_optab = ! unsignedp && flag_trapv
7447 && (GET_MODE_CLASS(mode) == MODE_INT)
7448 ? subv_optab : sub_optab;
7449
7450 /* No sense saving up arithmetic to be done
7451 if it's all in the wrong mode to form part of an address.
7452 And force_operand won't know whether to sign-extend or
7453 zero-extend. */
7454 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7455 || mode != ptr_mode)
7456 goto binop;
7457
7458 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7459 subtarget, &op0, &op1, modifier);
7460
7461 /* Convert A - const to A + (-const). */
7462 if (GET_CODE (op1) == CONST_INT)
7463 {
7464 op1 = negate_rtx (mode, op1);
7465 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7466 }
7467
7468 goto binop2;
7469
7470 case MULT_EXPR:
7471 /* If first operand is constant, swap them.
7472 Thus the following special case checks need only
7473 check the second operand. */
7474 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7475 {
7476 tree t1 = TREE_OPERAND (exp, 0);
7477 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7478 TREE_OPERAND (exp, 1) = t1;
7479 }
7480
7481 /* Attempt to return something suitable for generating an
7482 indexed address, for machines that support that. */
7483
7484 if (modifier == EXPAND_SUM && mode == ptr_mode
7485 && host_integerp (TREE_OPERAND (exp, 1), 0))
7486 {
7487 tree exp1 = TREE_OPERAND (exp, 1);
7488
7489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7490 EXPAND_SUM);
7491
7492 if (!REG_P (op0))
7493 op0 = force_operand (op0, NULL_RTX);
7494 if (!REG_P (op0))
7495 op0 = copy_to_mode_reg (mode, op0);
7496
7497 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7498 gen_int_mode (tree_low_cst (exp1, 0),
7499 TYPE_MODE (TREE_TYPE (exp1)))));
7500 }
7501
7502 if (modifier == EXPAND_STACK_PARM)
7503 target = 0;
7504
7505 /* Check for multiplying things that have been extended
7506 from a narrower type. If this machine supports multiplying
7507 in that narrower type with a result in the desired type,
7508 do it that way, and avoid the explicit type-conversion. */
7509 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7510 && TREE_CODE (type) == INTEGER_TYPE
7511 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7512 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7513 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7514 && int_fits_type_p (TREE_OPERAND (exp, 1),
7515 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7516 /* Don't use a widening multiply if a shift will do. */
7517 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7518 > HOST_BITS_PER_WIDE_INT)
7519 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7520 ||
7521 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7522 && (TYPE_PRECISION (TREE_TYPE
7523 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7524 == TYPE_PRECISION (TREE_TYPE
7525 (TREE_OPERAND
7526 (TREE_OPERAND (exp, 0), 0))))
7527 /* If both operands are extended, they must either both
7528 be zero-extended or both be sign-extended. */
7529 && (TYPE_UNSIGNED (TREE_TYPE
7530 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7531 == TYPE_UNSIGNED (TREE_TYPE
7532 (TREE_OPERAND
7533 (TREE_OPERAND (exp, 0), 0)))))))
7534 {
7535 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7536 enum machine_mode innermode = TYPE_MODE (op0type);
7537 bool zextend_p = TYPE_UNSIGNED (op0type);
7538 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7539 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7540
7541 if (mode == GET_MODE_WIDER_MODE (innermode))
7542 {
7543 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7544 {
7545 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7546 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7547 TREE_OPERAND (exp, 1),
7548 NULL_RTX, &op0, &op1, 0);
7549 else
7550 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7551 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7552 NULL_RTX, &op0, &op1, 0);
7553 goto binop2;
7554 }
7555 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7556 && innermode == word_mode)
7557 {
7558 rtx htem, hipart;
7559 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7560 NULL_RTX, VOIDmode, 0);
7561 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7562 op1 = convert_modes (innermode, mode,
7563 expand_expr (TREE_OPERAND (exp, 1),
7564 NULL_RTX, VOIDmode, 0),
7565 unsignedp);
7566 else
7567 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7568 NULL_RTX, VOIDmode, 0);
7569 temp = expand_binop (mode, other_optab, op0, op1, target,
7570 unsignedp, OPTAB_LIB_WIDEN);
7571 hipart = gen_highpart (innermode, temp);
7572 htem = expand_mult_highpart_adjust (innermode, hipart,
7573 op0, op1, hipart,
7574 zextend_p);
7575 if (htem != hipart)
7576 emit_move_insn (hipart, htem);
7577 return REDUCE_BIT_FIELD (temp);
7578 }
7579 }
7580 }
7581 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7582 subtarget, &op0, &op1, 0);
7583 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7584
7585 case TRUNC_DIV_EXPR:
7586 case FLOOR_DIV_EXPR:
7587 case CEIL_DIV_EXPR:
7588 case ROUND_DIV_EXPR:
7589 case EXACT_DIV_EXPR:
7590 if (modifier == EXPAND_STACK_PARM)
7591 target = 0;
7592 /* Possible optimization: compute the dividend with EXPAND_SUM
7593 then if the divisor is constant can optimize the case
7594 where some terms of the dividend have coeffs divisible by it. */
7595 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7596 subtarget, &op0, &op1, 0);
7597 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7598
7599 case RDIV_EXPR:
7600 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7601 expensive divide. If not, combine will rebuild the original
7602 computation. */
7603 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7604 && TREE_CODE (type) == REAL_TYPE
7605 && !real_onep (TREE_OPERAND (exp, 0)))
7606 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7607 build2 (RDIV_EXPR, type,
7608 build_real (type, dconst1),
7609 TREE_OPERAND (exp, 1))),
7610 target, tmode, modifier);
7611 this_optab = sdiv_optab;
7612 goto binop;
7613
7614 case TRUNC_MOD_EXPR:
7615 case FLOOR_MOD_EXPR:
7616 case CEIL_MOD_EXPR:
7617 case ROUND_MOD_EXPR:
7618 if (modifier == EXPAND_STACK_PARM)
7619 target = 0;
7620 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7621 subtarget, &op0, &op1, 0);
7622 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7623
7624 case FIX_ROUND_EXPR:
7625 case FIX_FLOOR_EXPR:
7626 case FIX_CEIL_EXPR:
7627 abort (); /* Not used for C. */
7628
7629 case FIX_TRUNC_EXPR:
7630 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7631 if (target == 0 || modifier == EXPAND_STACK_PARM)
7632 target = gen_reg_rtx (mode);
7633 expand_fix (target, op0, unsignedp);
7634 return target;
7635
7636 case FLOAT_EXPR:
7637 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7638 if (target == 0 || modifier == EXPAND_STACK_PARM)
7639 target = gen_reg_rtx (mode);
7640 /* expand_float can't figure out what to do if FROM has VOIDmode.
7641 So give it the correct mode. With -O, cse will optimize this. */
7642 if (GET_MODE (op0) == VOIDmode)
7643 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7644 op0);
7645 expand_float (target, op0,
7646 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7647 return target;
7648
7649 case NEGATE_EXPR:
7650 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7651 if (modifier == EXPAND_STACK_PARM)
7652 target = 0;
7653 temp = expand_unop (mode,
7654 ! unsignedp && flag_trapv
7655 && (GET_MODE_CLASS(mode) == MODE_INT)
7656 ? negv_optab : neg_optab, op0, target, 0);
7657 if (temp == 0)
7658 abort ();
7659 return REDUCE_BIT_FIELD (temp);
7660
7661 case ABS_EXPR:
7662 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7663 if (modifier == EXPAND_STACK_PARM)
7664 target = 0;
7665
7666 /* ABS_EXPR is not valid for complex arguments. */
7667 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7668 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7669 abort ();
7670
7671 /* Unsigned abs is simply the operand. Testing here means we don't
7672 risk generating incorrect code below. */
7673 if (TYPE_UNSIGNED (type))
7674 return op0;
7675
7676 return expand_abs (mode, op0, target, unsignedp,
7677 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7678
7679 case MAX_EXPR:
7680 case MIN_EXPR:
7681 target = original_target;
7682 if (target == 0
7683 || modifier == EXPAND_STACK_PARM
7684 || (MEM_P (target) && MEM_VOLATILE_P (target))
7685 || GET_MODE (target) != mode
7686 || (REG_P (target)
7687 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7688 target = gen_reg_rtx (mode);
7689 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7690 target, &op0, &op1, 0);
7691
7692 /* First try to do it with a special MIN or MAX instruction.
7693 If that does not win, use a conditional jump to select the proper
7694 value. */
7695 this_optab = (unsignedp
7696 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7697 : (code == MIN_EXPR ? smin_optab : smax_optab));
7698
7699 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7700 OPTAB_WIDEN);
7701 if (temp != 0)
7702 return temp;
7703
7704 /* At this point, a MEM target is no longer useful; we will get better
7705 code without it. */
7706
7707 if (MEM_P (target))
7708 target = gen_reg_rtx (mode);
7709
7710 /* If op1 was placed in target, swap op0 and op1. */
7711 if (target != op0 && target == op1)
7712 {
7713 rtx tem = op0;
7714 op0 = op1;
7715 op1 = tem;
7716 }
7717
7718 if (target != op0)
7719 emit_move_insn (target, op0);
7720
7721 op0 = gen_label_rtx ();
7722
7723 /* If this mode is an integer too wide to compare properly,
7724 compare word by word. Rely on cse to optimize constant cases. */
7725 if (GET_MODE_CLASS (mode) == MODE_INT
7726 && ! can_compare_p (GE, mode, ccp_jump))
7727 {
7728 if (code == MAX_EXPR)
7729 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7730 NULL_RTX, op0);
7731 else
7732 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7733 NULL_RTX, op0);
7734 }
7735 else
7736 {
7737 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7738 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7739 }
7740 emit_move_insn (target, op1);
7741 emit_label (op0);
7742 return target;
7743
7744 case BIT_NOT_EXPR:
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7746 if (modifier == EXPAND_STACK_PARM)
7747 target = 0;
7748 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7749 if (temp == 0)
7750 abort ();
7751 return temp;
7752
7753 /* ??? Can optimize bitwise operations with one arg constant.
7754 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7755 and (a bitwise1 b) bitwise2 b (etc)
7756 but that is probably not worth while. */
7757
7758 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7759 boolean values when we want in all cases to compute both of them. In
7760 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7761 as actual zero-or-1 values and then bitwise anding. In cases where
7762 there cannot be any side effects, better code would be made by
7763 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7764 how to recognize those cases. */
7765
7766 case TRUTH_AND_EXPR:
7767 case BIT_AND_EXPR:
7768 this_optab = and_optab;
7769 goto binop;
7770
7771 case TRUTH_OR_EXPR:
7772 case BIT_IOR_EXPR:
7773 this_optab = ior_optab;
7774 goto binop;
7775
7776 case TRUTH_XOR_EXPR:
7777 case BIT_XOR_EXPR:
7778 this_optab = xor_optab;
7779 goto binop;
7780
7781 case LSHIFT_EXPR:
7782 case RSHIFT_EXPR:
7783 case LROTATE_EXPR:
7784 case RROTATE_EXPR:
7785 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7786 subtarget = 0;
7787 if (modifier == EXPAND_STACK_PARM)
7788 target = 0;
7789 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7790 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7791 unsignedp);
7792
7793 /* Could determine the answer when only additive constants differ. Also,
7794 the addition of one can be handled by changing the condition. */
7795 case LT_EXPR:
7796 case LE_EXPR:
7797 case GT_EXPR:
7798 case GE_EXPR:
7799 case EQ_EXPR:
7800 case NE_EXPR:
7801 case UNORDERED_EXPR:
7802 case ORDERED_EXPR:
7803 case UNLT_EXPR:
7804 case UNLE_EXPR:
7805 case UNGT_EXPR:
7806 case UNGE_EXPR:
7807 case UNEQ_EXPR:
7808 case LTGT_EXPR:
7809 temp = do_store_flag (exp,
7810 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7811 tmode != VOIDmode ? tmode : mode, 0);
7812 if (temp != 0)
7813 return temp;
7814
7815 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7816 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7817 && original_target
7818 && REG_P (original_target)
7819 && (GET_MODE (original_target)
7820 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7821 {
7822 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7823 VOIDmode, 0);
7824
7825 /* If temp is constant, we can just compute the result. */
7826 if (GET_CODE (temp) == CONST_INT)
7827 {
7828 if (INTVAL (temp) != 0)
7829 emit_move_insn (target, const1_rtx);
7830 else
7831 emit_move_insn (target, const0_rtx);
7832
7833 return target;
7834 }
7835
7836 if (temp != original_target)
7837 {
7838 enum machine_mode mode1 = GET_MODE (temp);
7839 if (mode1 == VOIDmode)
7840 mode1 = tmode != VOIDmode ? tmode : mode;
7841
7842 temp = copy_to_mode_reg (mode1, temp);
7843 }
7844
7845 op1 = gen_label_rtx ();
7846 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7847 GET_MODE (temp), unsignedp, op1);
7848 emit_move_insn (temp, const1_rtx);
7849 emit_label (op1);
7850 return temp;
7851 }
7852
7853 /* If no set-flag instruction, must generate a conditional
7854 store into a temporary variable. Drop through
7855 and handle this like && and ||. */
7856
7857 case TRUTH_ANDIF_EXPR:
7858 case TRUTH_ORIF_EXPR:
7859 if (! ignore
7860 && (target == 0
7861 || modifier == EXPAND_STACK_PARM
7862 || ! safe_from_p (target, exp, 1)
7863 /* Make sure we don't have a hard reg (such as function's return
7864 value) live across basic blocks, if not optimizing. */
7865 || (!optimize && REG_P (target)
7866 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7867 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7868
7869 if (target)
7870 emit_clr_insn (target);
7871
7872 op1 = gen_label_rtx ();
7873 jumpifnot (exp, op1);
7874
7875 if (target)
7876 emit_0_to_1_insn (target);
7877
7878 emit_label (op1);
7879 return ignore ? const0_rtx : target;
7880
7881 case TRUTH_NOT_EXPR:
7882 if (modifier == EXPAND_STACK_PARM)
7883 target = 0;
7884 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7885 /* The parser is careful to generate TRUTH_NOT_EXPR
7886 only with operands that are always zero or one. */
7887 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7888 target, 1, OPTAB_LIB_WIDEN);
7889 if (temp == 0)
7890 abort ();
7891 return temp;
7892
7893 case COMPOUND_EXPR:
7894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7895 return expand_expr_real (TREE_OPERAND (exp, 1),
7896 (ignore ? const0_rtx : target),
7897 VOIDmode, modifier, alt_rtl);
7898
7899 case STATEMENT_LIST:
7900 {
7901 tree_stmt_iterator iter;
7902
7903 if (!ignore)
7904 abort ();
7905
7906 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7907 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7908 }
7909 return const0_rtx;
7910
7911 case COND_EXPR:
7912 /* If it's void, we don't need to worry about computing a value. */
7913 if (VOID_TYPE_P (TREE_TYPE (exp)))
7914 {
7915 tree pred = TREE_OPERAND (exp, 0);
7916 tree then_ = TREE_OPERAND (exp, 1);
7917 tree else_ = TREE_OPERAND (exp, 2);
7918
7919 if (TREE_CODE (then_) == GOTO_EXPR
7920 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
7921 {
7922 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7923 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7924 }
7925 else if (TREE_CODE (else_) == GOTO_EXPR
7926 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
7927 {
7928 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
7929 return expand_expr (then_, const0_rtx, VOIDmode, 0);
7930 }
7931
7932 /* Just use the 'if' machinery. */
7933 expand_start_cond (pred, 0);
7934 expand_expr (then_, const0_rtx, VOIDmode, 0);
7935
7936 exp = else_;
7937
7938 /* Iterate over 'else if's instead of recursing. */
7939 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
7940 {
7941 expand_start_else ();
7942 if (EXPR_HAS_LOCATION (exp))
7943 {
7944 emit_line_note (EXPR_LOCATION (exp));
7945 record_block_change (TREE_BLOCK (exp));
7946 }
7947 expand_elseif (TREE_OPERAND (exp, 0));
7948 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
7949 }
7950 /* Don't emit the jump and label if there's no 'else' clause. */
7951 if (TREE_SIDE_EFFECTS (exp))
7952 {
7953 expand_start_else ();
7954 expand_expr (exp, const0_rtx, VOIDmode, 0);
7955 }
7956 expand_end_cond ();
7957 return const0_rtx;
7958 }
7959
7960 /* If we would have a "singleton" (see below) were it not for a
7961 conversion in each arm, bring that conversion back out. */
7962 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7963 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7964 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7965 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7966 {
7967 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7968 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7969
7970 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7971 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7972 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7973 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7974 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7975 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7976 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7977 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7978 return expand_expr (build1 (NOP_EXPR, type,
7979 build3 (COND_EXPR, TREE_TYPE (iftrue),
7980 TREE_OPERAND (exp, 0),
7981 iftrue, iffalse)),
7982 target, tmode, modifier);
7983 }
7984
7985 {
7986 /* Note that COND_EXPRs whose type is a structure or union
7987 are required to be constructed to contain assignments of
7988 a temporary variable, so that we can evaluate them here
7989 for side effect only. If type is void, we must do likewise. */
7990
7991 /* If an arm of the branch requires a cleanup,
7992 only that cleanup is performed. */
7993
7994 tree singleton = 0;
7995 tree binary_op = 0, unary_op = 0;
7996
7997 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7998 convert it to our mode, if necessary. */
7999 if (integer_onep (TREE_OPERAND (exp, 1))
8000 && integer_zerop (TREE_OPERAND (exp, 2))
8001 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8002 {
8003 if (ignore)
8004 {
8005 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8006 modifier);
8007 return const0_rtx;
8008 }
8009
8010 if (modifier == EXPAND_STACK_PARM)
8011 target = 0;
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8013 if (GET_MODE (op0) == mode)
8014 return op0;
8015
8016 if (target == 0)
8017 target = gen_reg_rtx (mode);
8018 convert_move (target, op0, unsignedp);
8019 return target;
8020 }
8021
8022 /* Check for X ? A + B : A. If we have this, we can copy A to the
8023 output and conditionally add B. Similarly for unary operations.
8024 Don't do this if X has side-effects because those side effects
8025 might affect A or B and the "?" operation is a sequence point in
8026 ANSI. (operand_equal_p tests for side effects.) */
8027
8028 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8029 && operand_equal_p (TREE_OPERAND (exp, 2),
8030 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8031 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8032 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8033 && operand_equal_p (TREE_OPERAND (exp, 1),
8034 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8035 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8036 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8037 && operand_equal_p (TREE_OPERAND (exp, 2),
8038 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8039 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8040 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8041 && operand_equal_p (TREE_OPERAND (exp, 1),
8042 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8043 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8044
8045 /* If we are not to produce a result, we have no target. Otherwise,
8046 if a target was specified use it; it will not be used as an
8047 intermediate target unless it is safe. If no target, use a
8048 temporary. */
8049
8050 if (ignore)
8051 temp = 0;
8052 else if (modifier == EXPAND_STACK_PARM)
8053 temp = assign_temp (type, 0, 0, 1);
8054 else if (original_target
8055 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8056 || (singleton && REG_P (original_target)
8057 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8058 && original_target == var_rtx (singleton)))
8059 && GET_MODE (original_target) == mode
8060 #ifdef HAVE_conditional_move
8061 && (! can_conditionally_move_p (mode)
8062 || REG_P (original_target)
8063 || TREE_ADDRESSABLE (type))
8064 #endif
8065 && (!MEM_P (original_target)
8066 || TREE_ADDRESSABLE (type)))
8067 temp = original_target;
8068 else if (TREE_ADDRESSABLE (type))
8069 abort ();
8070 else
8071 temp = assign_temp (type, 0, 0, 1);
8072
8073 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8074 do the test of X as a store-flag operation, do this as
8075 A + ((X != 0) << log C). Similarly for other simple binary
8076 operators. Only do for C == 1 if BRANCH_COST is low. */
8077 if (temp && singleton && binary_op
8078 && (TREE_CODE (binary_op) == PLUS_EXPR
8079 || TREE_CODE (binary_op) == MINUS_EXPR
8080 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8081 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8082 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8083 : integer_onep (TREE_OPERAND (binary_op, 1)))
8084 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8085 {
8086 rtx result;
8087 tree cond;
8088 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8089 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8090 ? addv_optab : add_optab)
8091 : TREE_CODE (binary_op) == MINUS_EXPR
8092 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8093 ? subv_optab : sub_optab)
8094 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8095 : xor_optab);
8096
8097 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8098 if (singleton == TREE_OPERAND (exp, 1))
8099 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8100 else
8101 cond = TREE_OPERAND (exp, 0);
8102
8103 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8104 ? temp : NULL_RTX),
8105 mode, BRANCH_COST <= 1);
8106
8107 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8108 result = expand_shift (LSHIFT_EXPR, mode, result,
8109 build_int_2 (tree_log2
8110 (TREE_OPERAND
8111 (binary_op, 1)),
8112 0),
8113 (safe_from_p (temp, singleton, 1)
8114 ? temp : NULL_RTX), 0);
8115
8116 if (result)
8117 {
8118 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8119 return expand_binop (mode, boptab, op1, result, temp,
8120 unsignedp, OPTAB_LIB_WIDEN);
8121 }
8122 }
8123
8124 do_pending_stack_adjust ();
8125 NO_DEFER_POP;
8126 op0 = gen_label_rtx ();
8127
8128 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8129 {
8130 if (temp != 0)
8131 {
8132 /* If the target conflicts with the other operand of the
8133 binary op, we can't use it. Also, we can't use the target
8134 if it is a hard register, because evaluating the condition
8135 might clobber it. */
8136 if ((binary_op
8137 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8138 || (REG_P (temp)
8139 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8140 temp = gen_reg_rtx (mode);
8141 store_expr (singleton, temp,
8142 modifier == EXPAND_STACK_PARM ? 2 : 0);
8143 }
8144 else
8145 expand_expr (singleton,
8146 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8147 if (singleton == TREE_OPERAND (exp, 1))
8148 jumpif (TREE_OPERAND (exp, 0), op0);
8149 else
8150 jumpifnot (TREE_OPERAND (exp, 0), op0);
8151
8152 if (binary_op && temp == 0)
8153 /* Just touch the other operand. */
8154 expand_expr (TREE_OPERAND (binary_op, 1),
8155 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8156 else if (binary_op)
8157 store_expr (build2 (TREE_CODE (binary_op), type,
8158 make_tree (type, temp),
8159 TREE_OPERAND (binary_op, 1)),
8160 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8161 else
8162 store_expr (build1 (TREE_CODE (unary_op), type,
8163 make_tree (type, temp)),
8164 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8165 op1 = op0;
8166 }
8167 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8168 comparison operator. If we have one of these cases, set the
8169 output to A, branch on A (cse will merge these two references),
8170 then set the output to FOO. */
8171 else if (temp
8172 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8173 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8174 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8175 TREE_OPERAND (exp, 1), 0)
8176 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8177 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8178 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8179 {
8180 if (REG_P (temp)
8181 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8182 temp = gen_reg_rtx (mode);
8183 store_expr (TREE_OPERAND (exp, 1), temp,
8184 modifier == EXPAND_STACK_PARM ? 2 : 0);
8185 jumpif (TREE_OPERAND (exp, 0), op0);
8186
8187 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8188 store_expr (TREE_OPERAND (exp, 2), temp,
8189 modifier == EXPAND_STACK_PARM ? 2 : 0);
8190 else
8191 expand_expr (TREE_OPERAND (exp, 2),
8192 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8193 op1 = op0;
8194 }
8195 else if (temp
8196 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8197 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8198 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8199 TREE_OPERAND (exp, 2), 0)
8200 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8201 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8202 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8203 {
8204 if (REG_P (temp)
8205 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8206 temp = gen_reg_rtx (mode);
8207 store_expr (TREE_OPERAND (exp, 2), temp,
8208 modifier == EXPAND_STACK_PARM ? 2 : 0);
8209 jumpifnot (TREE_OPERAND (exp, 0), op0);
8210
8211 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8212 store_expr (TREE_OPERAND (exp, 1), temp,
8213 modifier == EXPAND_STACK_PARM ? 2 : 0);
8214 else
8215 expand_expr (TREE_OPERAND (exp, 1),
8216 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8217 op1 = op0;
8218 }
8219 else
8220 {
8221 op1 = gen_label_rtx ();
8222 jumpifnot (TREE_OPERAND (exp, 0), op0);
8223
8224 /* One branch of the cond can be void, if it never returns. For
8225 example A ? throw : E */
8226 if (temp != 0
8227 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8228 store_expr (TREE_OPERAND (exp, 1), temp,
8229 modifier == EXPAND_STACK_PARM ? 2 : 0);
8230 else
8231 expand_expr (TREE_OPERAND (exp, 1),
8232 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8233 emit_jump_insn (gen_jump (op1));
8234 emit_barrier ();
8235 emit_label (op0);
8236 if (temp != 0
8237 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8238 store_expr (TREE_OPERAND (exp, 2), temp,
8239 modifier == EXPAND_STACK_PARM ? 2 : 0);
8240 else
8241 expand_expr (TREE_OPERAND (exp, 2),
8242 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8243 }
8244
8245 emit_label (op1);
8246 OK_DEFER_POP;
8247
8248 return temp;
8249 }
8250
8251 case INIT_EXPR:
8252 {
8253 tree lhs = TREE_OPERAND (exp, 0);
8254 tree rhs = TREE_OPERAND (exp, 1);
8255
8256 temp = expand_assignment (lhs, rhs, ! ignore);
8257 return temp;
8258 }
8259
8260 case MODIFY_EXPR:
8261 {
8262 /* If lhs is complex, expand calls in rhs before computing it.
8263 That's so we don't compute a pointer and save it over a
8264 call. If lhs is simple, compute it first so we can give it
8265 as a target if the rhs is just a call. This avoids an
8266 extra temp and copy and that prevents a partial-subsumption
8267 which makes bad code. Actually we could treat
8268 component_ref's of vars like vars. */
8269
8270 tree lhs = TREE_OPERAND (exp, 0);
8271 tree rhs = TREE_OPERAND (exp, 1);
8272
8273 temp = 0;
8274
8275 /* Check for |= or &= of a bitfield of size one into another bitfield
8276 of size 1. In this case, (unless we need the result of the
8277 assignment) we can do this more efficiently with a
8278 test followed by an assignment, if necessary.
8279
8280 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8281 things change so we do, this code should be enhanced to
8282 support it. */
8283 if (ignore
8284 && TREE_CODE (lhs) == COMPONENT_REF
8285 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8286 || TREE_CODE (rhs) == BIT_AND_EXPR)
8287 && TREE_OPERAND (rhs, 0) == lhs
8288 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8289 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8290 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8291 {
8292 rtx label = gen_label_rtx ();
8293
8294 do_jump (TREE_OPERAND (rhs, 1),
8295 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8296 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8297 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8298 (TREE_CODE (rhs) == BIT_IOR_EXPR
8299 ? integer_one_node
8300 : integer_zero_node)),
8301 0);
8302 do_pending_stack_adjust ();
8303 emit_label (label);
8304 return const0_rtx;
8305 }
8306
8307 temp = expand_assignment (lhs, rhs, ! ignore);
8308
8309 return temp;
8310 }
8311
8312 case RETURN_EXPR:
8313 if (!TREE_OPERAND (exp, 0))
8314 expand_null_return ();
8315 else
8316 expand_return (TREE_OPERAND (exp, 0));
8317 return const0_rtx;
8318
8319 case ADDR_EXPR:
8320 if (modifier == EXPAND_STACK_PARM)
8321 target = 0;
8322 /* If we are taking the address of something erroneous, just
8323 return a zero. */
8324 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8325 return const0_rtx;
8326 /* If we are taking the address of a constant and are at the
8327 top level, we have to use output_constant_def since we can't
8328 call force_const_mem at top level. */
8329 else if (cfun == 0
8330 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8331 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8332 == 'c')))
8333 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8334 else
8335 {
8336 /* We make sure to pass const0_rtx down if we came in with
8337 ignore set, to avoid doing the cleanups twice for something. */
8338 op0 = expand_expr (TREE_OPERAND (exp, 0),
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8340 (modifier == EXPAND_INITIALIZER
8341 ? modifier : EXPAND_CONST_ADDRESS));
8342
8343 /* If we are going to ignore the result, OP0 will have been set
8344 to const0_rtx, so just return it. Don't get confused and
8345 think we are taking the address of the constant. */
8346 if (ignore)
8347 return op0;
8348
8349 /* We would like the object in memory. If it is a constant, we can
8350 have it be statically allocated into memory. For a non-constant,
8351 we need to allocate some memory and store the value into it. */
8352
8353 if (CONSTANT_P (op0))
8354 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8355 op0);
8356 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8357 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8358 || GET_CODE (op0) == LO_SUM)
8359 {
8360 /* If this object is in a register, it can't be BLKmode. */
8361 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8362 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8363
8364 if (GET_CODE (op0) == PARALLEL)
8365 /* Handle calls that pass values in multiple
8366 non-contiguous locations. The Irix 6 ABI has examples
8367 of this. */
8368 emit_group_store (memloc, op0, inner_type,
8369 int_size_in_bytes (inner_type));
8370 else
8371 emit_move_insn (memloc, op0);
8372
8373 op0 = memloc;
8374 }
8375
8376 if (!MEM_P (op0))
8377 abort ();
8378
8379 mark_temp_addr_taken (op0);
8380 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8381 {
8382 op0 = XEXP (op0, 0);
8383 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8384 op0 = convert_memory_address (ptr_mode, op0);
8385 return op0;
8386 }
8387
8388 /* If OP0 is not aligned as least as much as the type requires, we
8389 need to make a temporary, copy OP0 to it, and take the address of
8390 the temporary. We want to use the alignment of the type, not of
8391 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8392 the test for BLKmode means that can't happen. The test for
8393 BLKmode is because we never make mis-aligned MEMs with
8394 non-BLKmode.
8395
8396 We don't need to do this at all if the machine doesn't have
8397 strict alignment. */
8398 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8399 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8400 > MEM_ALIGN (op0))
8401 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8402 {
8403 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8404 rtx new;
8405
8406 if (TYPE_ALIGN_OK (inner_type))
8407 abort ();
8408
8409 if (TREE_ADDRESSABLE (inner_type))
8410 {
8411 /* We can't make a bitwise copy of this object, so fail. */
8412 error ("cannot take the address of an unaligned member");
8413 return const0_rtx;
8414 }
8415
8416 new = assign_stack_temp_for_type
8417 (TYPE_MODE (inner_type),
8418 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8419 : int_size_in_bytes (inner_type),
8420 1, build_qualified_type (inner_type,
8421 (TYPE_QUALS (inner_type)
8422 | TYPE_QUAL_CONST)));
8423
8424 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8425 (modifier == EXPAND_STACK_PARM
8426 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8427
8428 op0 = new;
8429 }
8430
8431 op0 = force_operand (XEXP (op0, 0), target);
8432 }
8433
8434 if (flag_force_addr
8435 && !REG_P (op0)
8436 && modifier != EXPAND_CONST_ADDRESS
8437 && modifier != EXPAND_INITIALIZER
8438 && modifier != EXPAND_SUM)
8439 op0 = force_reg (Pmode, op0);
8440
8441 if (REG_P (op0)
8442 && ! REG_USERVAR_P (op0))
8443 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8444
8445 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8446 op0 = convert_memory_address (ptr_mode, op0);
8447
8448 return op0;
8449
8450 case ENTRY_VALUE_EXPR:
8451 abort ();
8452
8453 /* COMPLEX type for Extended Pascal & Fortran */
8454 case COMPLEX_EXPR:
8455 {
8456 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8457 rtx insns;
8458
8459 /* Get the rtx code of the operands. */
8460 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8461 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8462
8463 if (! target)
8464 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8465
8466 start_sequence ();
8467
8468 /* Move the real (op0) and imaginary (op1) parts to their location. */
8469 emit_move_insn (gen_realpart (mode, target), op0);
8470 emit_move_insn (gen_imagpart (mode, target), op1);
8471
8472 insns = get_insns ();
8473 end_sequence ();
8474
8475 /* Complex construction should appear as a single unit. */
8476 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8477 each with a separate pseudo as destination.
8478 It's not correct for flow to treat them as a unit. */
8479 if (GET_CODE (target) != CONCAT)
8480 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8481 else
8482 emit_insn (insns);
8483
8484 return target;
8485 }
8486
8487 case REALPART_EXPR:
8488 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8489 return gen_realpart (mode, op0);
8490
8491 case IMAGPART_EXPR:
8492 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8493 return gen_imagpart (mode, op0);
8494
8495 case CONJ_EXPR:
8496 {
8497 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8498 rtx imag_t;
8499 rtx insns;
8500
8501 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8502
8503 if (! target)
8504 target = gen_reg_rtx (mode);
8505
8506 start_sequence ();
8507
8508 /* Store the realpart and the negated imagpart to target. */
8509 emit_move_insn (gen_realpart (partmode, target),
8510 gen_realpart (partmode, op0));
8511
8512 imag_t = gen_imagpart (partmode, target);
8513 temp = expand_unop (partmode,
8514 ! unsignedp && flag_trapv
8515 && (GET_MODE_CLASS(partmode) == MODE_INT)
8516 ? negv_optab : neg_optab,
8517 gen_imagpart (partmode, op0), imag_t, 0);
8518 if (temp != imag_t)
8519 emit_move_insn (imag_t, temp);
8520
8521 insns = get_insns ();
8522 end_sequence ();
8523
8524 /* Conjugate should appear as a single unit
8525 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8526 each with a separate pseudo as destination.
8527 It's not correct for flow to treat them as a unit. */
8528 if (GET_CODE (target) != CONCAT)
8529 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8530 else
8531 emit_insn (insns);
8532
8533 return target;
8534 }
8535
8536 case RESX_EXPR:
8537 expand_resx_expr (exp);
8538 return const0_rtx;
8539
8540 case TRY_CATCH_EXPR:
8541 case CATCH_EXPR:
8542 case EH_FILTER_EXPR:
8543 case TRY_FINALLY_EXPR:
8544 /* Lowered by tree-eh.c. */
8545 abort ();
8546
8547 case WITH_CLEANUP_EXPR:
8548 case CLEANUP_POINT_EXPR:
8549 case TARGET_EXPR:
8550 case CASE_LABEL_EXPR:
8551 case VA_ARG_EXPR:
8552 case BIND_EXPR:
8553 /* Lowered by gimplify.c. */
8554 abort ();
8555
8556 case EXC_PTR_EXPR:
8557 return get_exception_pointer (cfun);
8558
8559 case FILTER_EXPR:
8560 return get_exception_filter (cfun);
8561
8562 case PREINCREMENT_EXPR:
8563 case PREDECREMENT_EXPR:
8564 case POSTINCREMENT_EXPR:
8565 case POSTDECREMENT_EXPR:
8566 case FDESC_EXPR:
8567 /* Function descriptors are not valid except for as
8568 initialization constants, and should not be expanded. */
8569 abort ();
8570
8571 case SWITCH_EXPR:
8572 expand_start_case (SWITCH_COND (exp));
8573 /* The switch body is lowered in gimplify.c, we should never have
8574 switches with a non-NULL SWITCH_BODY here. */
8575 if (SWITCH_BODY (exp))
8576 abort ();
8577 if (SWITCH_LABELS (exp))
8578 {
8579 tree vec = SWITCH_LABELS (exp);
8580 size_t i = TREE_VEC_LENGTH (vec);
8581
8582 do
8583 {
8584 tree elt = TREE_VEC_ELT (vec, --i);
8585 add_case_node (CASE_LOW (elt), CASE_HIGH (elt),
8586 CASE_LABEL (elt));
8587 }
8588 while (i);
8589 }
8590 else
8591 abort ();
8592 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8593 return const0_rtx;
8594
8595 case LABEL_EXPR:
8596 expand_label (TREE_OPERAND (exp, 0));
8597 return const0_rtx;
8598
8599 case ASM_EXPR:
8600 expand_asm_expr (exp);
8601 return const0_rtx;
8602
8603 case WITH_SIZE_EXPR:
8604 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8605 have pulled out the size to use in whatever context it needed. */
8606 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8607 modifier, alt_rtl);
8608
8609 default:
8610 return lang_hooks.expand_expr (exp, original_target, tmode,
8611 modifier, alt_rtl);
8612 }
8613
8614 /* Here to do an ordinary binary operator, generating an instruction
8615 from the optab already placed in `this_optab'. */
8616 binop:
8617 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8618 subtarget, &op0, &op1, 0);
8619 binop2:
8620 if (modifier == EXPAND_STACK_PARM)
8621 target = 0;
8622 temp = expand_binop (mode, this_optab, op0, op1, target,
8623 unsignedp, OPTAB_LIB_WIDEN);
8624 if (temp == 0)
8625 abort ();
8626 return REDUCE_BIT_FIELD (temp);
8627 }
8628 #undef REDUCE_BIT_FIELD
8629 \f
8630 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8631 signedness of TYPE), possibly returning the result in TARGET. */
8632 static rtx
8633 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8634 {
8635 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8636 if (target && GET_MODE (target) != GET_MODE (exp))
8637 target = 0;
8638 if (TYPE_UNSIGNED (type))
8639 {
8640 rtx mask;
8641 if (prec < HOST_BITS_PER_WIDE_INT)
8642 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8643 GET_MODE (exp));
8644 else
8645 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8646 ((unsigned HOST_WIDE_INT) 1
8647 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8648 GET_MODE (exp));
8649 return expand_and (GET_MODE (exp), exp, mask, target);
8650 }
8651 else
8652 {
8653 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8654 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8655 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8656 }
8657 }
8658 \f
8659 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8660 when applied to the address of EXP produces an address known to be
8661 aligned more than BIGGEST_ALIGNMENT. */
8662
8663 static int
8664 is_aligning_offset (tree offset, tree exp)
8665 {
8666 /* Strip off any conversions. */
8667 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8668 || TREE_CODE (offset) == NOP_EXPR
8669 || TREE_CODE (offset) == CONVERT_EXPR)
8670 offset = TREE_OPERAND (offset, 0);
8671
8672 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8673 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8674 if (TREE_CODE (offset) != BIT_AND_EXPR
8675 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8676 || compare_tree_int (TREE_OPERAND (offset, 1),
8677 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8678 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8679 return 0;
8680
8681 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8682 It must be NEGATE_EXPR. Then strip any more conversions. */
8683 offset = TREE_OPERAND (offset, 0);
8684 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8685 || TREE_CODE (offset) == NOP_EXPR
8686 || TREE_CODE (offset) == CONVERT_EXPR)
8687 offset = TREE_OPERAND (offset, 0);
8688
8689 if (TREE_CODE (offset) != NEGATE_EXPR)
8690 return 0;
8691
8692 offset = TREE_OPERAND (offset, 0);
8693 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8694 || TREE_CODE (offset) == NOP_EXPR
8695 || TREE_CODE (offset) == CONVERT_EXPR)
8696 offset = TREE_OPERAND (offset, 0);
8697
8698 /* This must now be the address of EXP. */
8699 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8700 }
8701 \f
8702 /* Return the tree node if an ARG corresponds to a string constant or zero
8703 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8704 in bytes within the string that ARG is accessing. The type of the
8705 offset will be `sizetype'. */
8706
8707 tree
8708 string_constant (tree arg, tree *ptr_offset)
8709 {
8710 STRIP_NOPS (arg);
8711
8712 if (TREE_CODE (arg) == ADDR_EXPR
8713 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8714 {
8715 *ptr_offset = size_zero_node;
8716 return TREE_OPERAND (arg, 0);
8717 }
8718 if (TREE_CODE (arg) == ADDR_EXPR
8719 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8720 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8721 {
8722 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8723 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8724 }
8725 else if (TREE_CODE (arg) == PLUS_EXPR)
8726 {
8727 tree arg0 = TREE_OPERAND (arg, 0);
8728 tree arg1 = TREE_OPERAND (arg, 1);
8729
8730 STRIP_NOPS (arg0);
8731 STRIP_NOPS (arg1);
8732
8733 if (TREE_CODE (arg0) == ADDR_EXPR
8734 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8735 {
8736 *ptr_offset = convert (sizetype, arg1);
8737 return TREE_OPERAND (arg0, 0);
8738 }
8739 else if (TREE_CODE (arg1) == ADDR_EXPR
8740 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8741 {
8742 *ptr_offset = convert (sizetype, arg0);
8743 return TREE_OPERAND (arg1, 0);
8744 }
8745 }
8746
8747 return 0;
8748 }
8749 \f
8750 /* Generate code to calculate EXP using a store-flag instruction
8751 and return an rtx for the result. EXP is either a comparison
8752 or a TRUTH_NOT_EXPR whose operand is a comparison.
8753
8754 If TARGET is nonzero, store the result there if convenient.
8755
8756 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8757 cheap.
8758
8759 Return zero if there is no suitable set-flag instruction
8760 available on this machine.
8761
8762 Once expand_expr has been called on the arguments of the comparison,
8763 we are committed to doing the store flag, since it is not safe to
8764 re-evaluate the expression. We emit the store-flag insn by calling
8765 emit_store_flag, but only expand the arguments if we have a reason
8766 to believe that emit_store_flag will be successful. If we think that
8767 it will, but it isn't, we have to simulate the store-flag with a
8768 set/jump/set sequence. */
8769
8770 static rtx
8771 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8772 {
8773 enum rtx_code code;
8774 tree arg0, arg1, type;
8775 tree tem;
8776 enum machine_mode operand_mode;
8777 int invert = 0;
8778 int unsignedp;
8779 rtx op0, op1;
8780 enum insn_code icode;
8781 rtx subtarget = target;
8782 rtx result, label;
8783
8784 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8785 result at the end. We can't simply invert the test since it would
8786 have already been inverted if it were valid. This case occurs for
8787 some floating-point comparisons. */
8788
8789 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8790 invert = 1, exp = TREE_OPERAND (exp, 0);
8791
8792 arg0 = TREE_OPERAND (exp, 0);
8793 arg1 = TREE_OPERAND (exp, 1);
8794
8795 /* Don't crash if the comparison was erroneous. */
8796 if (arg0 == error_mark_node || arg1 == error_mark_node)
8797 return const0_rtx;
8798
8799 type = TREE_TYPE (arg0);
8800 operand_mode = TYPE_MODE (type);
8801 unsignedp = TYPE_UNSIGNED (type);
8802
8803 /* We won't bother with BLKmode store-flag operations because it would mean
8804 passing a lot of information to emit_store_flag. */
8805 if (operand_mode == BLKmode)
8806 return 0;
8807
8808 /* We won't bother with store-flag operations involving function pointers
8809 when function pointers must be canonicalized before comparisons. */
8810 #ifdef HAVE_canonicalize_funcptr_for_compare
8811 if (HAVE_canonicalize_funcptr_for_compare
8812 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8813 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8814 == FUNCTION_TYPE))
8815 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8816 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8817 == FUNCTION_TYPE))))
8818 return 0;
8819 #endif
8820
8821 STRIP_NOPS (arg0);
8822 STRIP_NOPS (arg1);
8823
8824 /* Get the rtx comparison code to use. We know that EXP is a comparison
8825 operation of some type. Some comparisons against 1 and -1 can be
8826 converted to comparisons with zero. Do so here so that the tests
8827 below will be aware that we have a comparison with zero. These
8828 tests will not catch constants in the first operand, but constants
8829 are rarely passed as the first operand. */
8830
8831 switch (TREE_CODE (exp))
8832 {
8833 case EQ_EXPR:
8834 code = EQ;
8835 break;
8836 case NE_EXPR:
8837 code = NE;
8838 break;
8839 case LT_EXPR:
8840 if (integer_onep (arg1))
8841 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8842 else
8843 code = unsignedp ? LTU : LT;
8844 break;
8845 case LE_EXPR:
8846 if (! unsignedp && integer_all_onesp (arg1))
8847 arg1 = integer_zero_node, code = LT;
8848 else
8849 code = unsignedp ? LEU : LE;
8850 break;
8851 case GT_EXPR:
8852 if (! unsignedp && integer_all_onesp (arg1))
8853 arg1 = integer_zero_node, code = GE;
8854 else
8855 code = unsignedp ? GTU : GT;
8856 break;
8857 case GE_EXPR:
8858 if (integer_onep (arg1))
8859 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8860 else
8861 code = unsignedp ? GEU : GE;
8862 break;
8863
8864 case UNORDERED_EXPR:
8865 code = UNORDERED;
8866 break;
8867 case ORDERED_EXPR:
8868 code = ORDERED;
8869 break;
8870 case UNLT_EXPR:
8871 code = UNLT;
8872 break;
8873 case UNLE_EXPR:
8874 code = UNLE;
8875 break;
8876 case UNGT_EXPR:
8877 code = UNGT;
8878 break;
8879 case UNGE_EXPR:
8880 code = UNGE;
8881 break;
8882 case UNEQ_EXPR:
8883 code = UNEQ;
8884 break;
8885 case LTGT_EXPR:
8886 code = LTGT;
8887 break;
8888
8889 default:
8890 abort ();
8891 }
8892
8893 /* Put a constant second. */
8894 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8895 {
8896 tem = arg0; arg0 = arg1; arg1 = tem;
8897 code = swap_condition (code);
8898 }
8899
8900 /* If this is an equality or inequality test of a single bit, we can
8901 do this by shifting the bit being tested to the low-order bit and
8902 masking the result with the constant 1. If the condition was EQ,
8903 we xor it with 1. This does not require an scc insn and is faster
8904 than an scc insn even if we have it.
8905
8906 The code to make this transformation was moved into fold_single_bit_test,
8907 so we just call into the folder and expand its result. */
8908
8909 if ((code == NE || code == EQ)
8910 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8911 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8912 {
8913 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8914 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8915 arg0, arg1, type),
8916 target, VOIDmode, EXPAND_NORMAL);
8917 }
8918
8919 /* Now see if we are likely to be able to do this. Return if not. */
8920 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8921 return 0;
8922
8923 icode = setcc_gen_code[(int) code];
8924 if (icode == CODE_FOR_nothing
8925 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8926 {
8927 /* We can only do this if it is one of the special cases that
8928 can be handled without an scc insn. */
8929 if ((code == LT && integer_zerop (arg1))
8930 || (! only_cheap && code == GE && integer_zerop (arg1)))
8931 ;
8932 else if (BRANCH_COST >= 0
8933 && ! only_cheap && (code == NE || code == EQ)
8934 && TREE_CODE (type) != REAL_TYPE
8935 && ((abs_optab->handlers[(int) operand_mode].insn_code
8936 != CODE_FOR_nothing)
8937 || (ffs_optab->handlers[(int) operand_mode].insn_code
8938 != CODE_FOR_nothing)))
8939 ;
8940 else
8941 return 0;
8942 }
8943
8944 if (! get_subtarget (target)
8945 || GET_MODE (subtarget) != operand_mode)
8946 subtarget = 0;
8947
8948 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8949
8950 if (target == 0)
8951 target = gen_reg_rtx (mode);
8952
8953 result = emit_store_flag (target, code, op0, op1,
8954 operand_mode, unsignedp, 1);
8955
8956 if (result)
8957 {
8958 if (invert)
8959 result = expand_binop (mode, xor_optab, result, const1_rtx,
8960 result, 0, OPTAB_LIB_WIDEN);
8961 return result;
8962 }
8963
8964 /* If this failed, we have to do this with set/compare/jump/set code. */
8965 if (!REG_P (target)
8966 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8967 target = gen_reg_rtx (GET_MODE (target));
8968
8969 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8970 result = compare_from_rtx (op0, op1, code, unsignedp,
8971 operand_mode, NULL_RTX);
8972 if (GET_CODE (result) == CONST_INT)
8973 return (((result == const0_rtx && ! invert)
8974 || (result != const0_rtx && invert))
8975 ? const0_rtx : const1_rtx);
8976
8977 /* The code of RESULT may not match CODE if compare_from_rtx
8978 decided to swap its operands and reverse the original code.
8979
8980 We know that compare_from_rtx returns either a CONST_INT or
8981 a new comparison code, so it is safe to just extract the
8982 code from RESULT. */
8983 code = GET_CODE (result);
8984
8985 label = gen_label_rtx ();
8986 if (bcc_gen_fctn[(int) code] == 0)
8987 abort ();
8988
8989 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8990 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8991 emit_label (label);
8992
8993 return target;
8994 }
8995 \f
8996
8997 /* Stubs in case we haven't got a casesi insn. */
8998 #ifndef HAVE_casesi
8999 # define HAVE_casesi 0
9000 # define gen_casesi(a, b, c, d, e) (0)
9001 # define CODE_FOR_casesi CODE_FOR_nothing
9002 #endif
9003
9004 /* If the machine does not have a case insn that compares the bounds,
9005 this means extra overhead for dispatch tables, which raises the
9006 threshold for using them. */
9007 #ifndef CASE_VALUES_THRESHOLD
9008 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9009 #endif /* CASE_VALUES_THRESHOLD */
9010
9011 unsigned int
9012 case_values_threshold (void)
9013 {
9014 return CASE_VALUES_THRESHOLD;
9015 }
9016
9017 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9018 0 otherwise (i.e. if there is no casesi instruction). */
9019 int
9020 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9021 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9022 {
9023 enum machine_mode index_mode = SImode;
9024 int index_bits = GET_MODE_BITSIZE (index_mode);
9025 rtx op1, op2, index;
9026 enum machine_mode op_mode;
9027
9028 if (! HAVE_casesi)
9029 return 0;
9030
9031 /* Convert the index to SImode. */
9032 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9033 {
9034 enum machine_mode omode = TYPE_MODE (index_type);
9035 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9036
9037 /* We must handle the endpoints in the original mode. */
9038 index_expr = build2 (MINUS_EXPR, index_type,
9039 index_expr, minval);
9040 minval = integer_zero_node;
9041 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9042 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9043 omode, 1, default_label);
9044 /* Now we can safely truncate. */
9045 index = convert_to_mode (index_mode, index, 0);
9046 }
9047 else
9048 {
9049 if (TYPE_MODE (index_type) != index_mode)
9050 {
9051 index_expr = convert (lang_hooks.types.type_for_size
9052 (index_bits, 0), index_expr);
9053 index_type = TREE_TYPE (index_expr);
9054 }
9055
9056 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9057 }
9058
9059 do_pending_stack_adjust ();
9060
9061 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9062 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9063 (index, op_mode))
9064 index = copy_to_mode_reg (op_mode, index);
9065
9066 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9067
9068 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9069 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9070 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9071 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9072 (op1, op_mode))
9073 op1 = copy_to_mode_reg (op_mode, op1);
9074
9075 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9076
9077 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9078 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9079 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9080 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9081 (op2, op_mode))
9082 op2 = copy_to_mode_reg (op_mode, op2);
9083
9084 emit_jump_insn (gen_casesi (index, op1, op2,
9085 table_label, default_label));
9086 return 1;
9087 }
9088
9089 /* Attempt to generate a tablejump instruction; same concept. */
9090 #ifndef HAVE_tablejump
9091 #define HAVE_tablejump 0
9092 #define gen_tablejump(x, y) (0)
9093 #endif
9094
9095 /* Subroutine of the next function.
9096
9097 INDEX is the value being switched on, with the lowest value
9098 in the table already subtracted.
9099 MODE is its expected mode (needed if INDEX is constant).
9100 RANGE is the length of the jump table.
9101 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9102
9103 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9104 index value is out of range. */
9105
9106 static void
9107 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9108 rtx default_label)
9109 {
9110 rtx temp, vector;
9111
9112 if (INTVAL (range) > cfun->max_jumptable_ents)
9113 cfun->max_jumptable_ents = INTVAL (range);
9114
9115 /* Do an unsigned comparison (in the proper mode) between the index
9116 expression and the value which represents the length of the range.
9117 Since we just finished subtracting the lower bound of the range
9118 from the index expression, this comparison allows us to simultaneously
9119 check that the original index expression value is both greater than
9120 or equal to the minimum value of the range and less than or equal to
9121 the maximum value of the range. */
9122
9123 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9124 default_label);
9125
9126 /* If index is in range, it must fit in Pmode.
9127 Convert to Pmode so we can index with it. */
9128 if (mode != Pmode)
9129 index = convert_to_mode (Pmode, index, 1);
9130
9131 /* Don't let a MEM slip through, because then INDEX that comes
9132 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9133 and break_out_memory_refs will go to work on it and mess it up. */
9134 #ifdef PIC_CASE_VECTOR_ADDRESS
9135 if (flag_pic && !REG_P (index))
9136 index = copy_to_mode_reg (Pmode, index);
9137 #endif
9138
9139 /* If flag_force_addr were to affect this address
9140 it could interfere with the tricky assumptions made
9141 about addresses that contain label-refs,
9142 which may be valid only very near the tablejump itself. */
9143 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9144 GET_MODE_SIZE, because this indicates how large insns are. The other
9145 uses should all be Pmode, because they are addresses. This code
9146 could fail if addresses and insns are not the same size. */
9147 index = gen_rtx_PLUS (Pmode,
9148 gen_rtx_MULT (Pmode, index,
9149 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9150 gen_rtx_LABEL_REF (Pmode, table_label));
9151 #ifdef PIC_CASE_VECTOR_ADDRESS
9152 if (flag_pic)
9153 index = PIC_CASE_VECTOR_ADDRESS (index);
9154 else
9155 #endif
9156 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9157 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9158 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9159 RTX_UNCHANGING_P (vector) = 1;
9160 MEM_NOTRAP_P (vector) = 1;
9161 convert_move (temp, vector, 0);
9162
9163 emit_jump_insn (gen_tablejump (temp, table_label));
9164
9165 /* If we are generating PIC code or if the table is PC-relative, the
9166 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9167 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9168 emit_barrier ();
9169 }
9170
9171 int
9172 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9173 rtx table_label, rtx default_label)
9174 {
9175 rtx index;
9176
9177 if (! HAVE_tablejump)
9178 return 0;
9179
9180 index_expr = fold (build2 (MINUS_EXPR, index_type,
9181 convert (index_type, index_expr),
9182 convert (index_type, minval)));
9183 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9184 do_pending_stack_adjust ();
9185
9186 do_tablejump (index, TYPE_MODE (index_type),
9187 convert_modes (TYPE_MODE (index_type),
9188 TYPE_MODE (TREE_TYPE (range)),
9189 expand_expr (range, NULL_RTX,
9190 VOIDmode, 0),
9191 TYPE_UNSIGNED (TREE_TYPE (range))),
9192 table_label, default_label);
9193 return 1;
9194 }
9195
9196 /* Nonzero if the mode is a valid vector mode for this architecture.
9197 This returns nonzero even if there is no hardware support for the
9198 vector mode, but we can emulate with narrower modes. */
9199
9200 int
9201 vector_mode_valid_p (enum machine_mode mode)
9202 {
9203 enum mode_class class = GET_MODE_CLASS (mode);
9204 enum machine_mode innermode;
9205
9206 /* Doh! What's going on? */
9207 if (class != MODE_VECTOR_INT
9208 && class != MODE_VECTOR_FLOAT)
9209 return 0;
9210
9211 /* Hardware support. Woo hoo! */
9212 if (VECTOR_MODE_SUPPORTED_P (mode))
9213 return 1;
9214
9215 innermode = GET_MODE_INNER (mode);
9216
9217 /* We should probably return 1 if requesting V4DI and we have no DI,
9218 but we have V2DI, but this is probably very unlikely. */
9219
9220 /* If we have support for the inner mode, we can safely emulate it.
9221 We may not have V2DI, but me can emulate with a pair of DIs. */
9222 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9223 }
9224
9225 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9226 static rtx
9227 const_vector_from_tree (tree exp)
9228 {
9229 rtvec v;
9230 int units, i;
9231 tree link, elt;
9232 enum machine_mode inner, mode;
9233
9234 mode = TYPE_MODE (TREE_TYPE (exp));
9235
9236 if (initializer_zerop (exp))
9237 return CONST0_RTX (mode);
9238
9239 units = GET_MODE_NUNITS (mode);
9240 inner = GET_MODE_INNER (mode);
9241
9242 v = rtvec_alloc (units);
9243
9244 link = TREE_VECTOR_CST_ELTS (exp);
9245 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9246 {
9247 elt = TREE_VALUE (link);
9248
9249 if (TREE_CODE (elt) == REAL_CST)
9250 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9251 inner);
9252 else
9253 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9254 TREE_INT_CST_HIGH (elt),
9255 inner);
9256 }
9257
9258 /* Initialize remaining elements to 0. */
9259 for (; i < units; ++i)
9260 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9261
9262 return gen_rtx_raw_CONST_VECTOR (mode, v);
9263 }
9264 #include "gt-expr.h"