/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
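
/* For example (an illustration, not part of the original source): on
   ia32, STACK_GROWS_DOWNWARD is defined while ARGS_GROW_DOWNWARD is
   not, so the test above defines PUSH_ARGS_REVERSED and the pushes
   for a call's arguments are emitted last-to-first.  */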

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
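
/* Illustrative numbers (assumed, not from the original source): with
   the default MOVE_RATIO of 15 when not optimizing for size, a
   32-byte word-aligned copy on a 32-bit target costs 8 SImode moves,
   so MOVE_BY_PIECES_P holds and the copy is expanded inline, while a
   128-byte copy (32 moves) is left to a movstr pattern or libcall.  */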

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
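
/* Illustrative walk-through (not part of the original source): when
   expanding something like `*p++ = 0', the expander enqueues the add
   that bumps P via enqueue_insn.  Any later use of P expanded before
   emit_queue flushes the chain goes through protect_from_queue, which
   yields either P itself (increment not yet emitted) or a pseudo
   holding P's pre-increment value.  */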

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */  /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
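
/* Worked example (illustrative, not part of the original source):
   converting a signed SImode value to DImode on a 32-bit target with
   no direct extend insn takes the multiword branch above: the low
   word of TO receives FROM, and the remaining word is filled with
   FROM >> 31 (all zeros or all ones), computed by the expand_shift
   path when no `slt' pattern applies.  */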

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
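
  /* Example (illustrative, not part of the original source): on a
     host with 32-bit HOST_WIDE_INT, converting (const_int -1) known
     to come from QImode into an unsigned 64-bit mode masks VAL down
     to 0xff and builds the double constant with a zero high word,
     giving 255 rather than the all-ones value gen_lowpart would have
     produced.  */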

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
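
/* For instance (assumed values, not from the original source): on a
   target where MOVE_MAX_PIECES is 8 and HOST_WIDE_INT has 64 bits,
   STORE_MAX_PIECES comes out to MIN (8, 16) == 8 bytes per store.  */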

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
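
/* Worked example (illustrative, not part of the original source):
   with MOVE_MAX of 4 and adequate alignment, l = 7 decomposes
   greedily into one SImode move (4 bytes), one HImode move (2 bytes)
   and one QImode move (1 byte), so the function returns 3.  */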

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
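
/* Illustrative usage (hypothetical operands, not from the original
   source): a caller copying a constant-sized block would write
   something like

     emit_block_move (dest_mem, src_mem, GEN_INT (nbytes),
                      BLOCK_OP_NORMAL);

   A CONST_INT size lets the move_by_pieces path fire when
   MOVE_BY_PIECES_P approves; otherwise the copy falls through to a
   movstr pattern, the memcpy/bcopy libcall, or the byte loop.  */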
1392
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1396
1397 static bool
1398 block_move_libcall_safe_for_call_parm (void)
1399 {
1400 /* If arguments are pushed on the stack, then they're safe. */
1401 if (PUSH_ARGS)
1402 return true;
1403
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1407 {
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1412 }
1413 #endif
1414
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1417 {
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
1420
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1423
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1426 {
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
1430 return false;
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435 #endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 }
1438 }
1439 return true;
1440 }
1441
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
1444
1445 static bool
1446 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1447 {
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 enum machine_mode mode;
1450
1451 /* Since this is a move insn, we don't care about volatility. */
1452 volatile_ok = 1;
1453
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1457
1458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1459 mode = GET_MODE_WIDER_MODE (mode))
1460 {
1461 enum insn_code code = movstr_optab[(int) mode];
1462 insn_operand_predicate_fn pred;
1463
1464 if (code != CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1471 <= (GET_MODE_MASK (mode) >> 1)))
1472 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1473 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1474 || (*pred) (x, BLKmode))
1475 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1476 || (*pred) (y, BLKmode))
1477 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1478 || (*pred) (opalign, VOIDmode)))
1479 {
1480 rtx op2;
1481 rtx last = get_last_insn ();
1482 rtx pat;
1483
1484 op2 = convert_to_mode (mode, size, 1);
1485 pred = insn_data[(int) code].operand[2].predicate;
1486 if (pred != 0 && ! (*pred) (op2, mode))
1487 op2 = copy_to_mode_reg (mode, op2);
1488
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1493
1494 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1495 if (pat)
1496 {
1497 emit_insn (pat);
1498 volatile_ok = 0;
1499 return true;
1500 }
1501 else
1502 delete_insns_since (last);
1503 }
1504 }
1505
1506 volatile_ok = 0;
1507 return false;
1508 }
1509
1510 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
1512
1513 static rtx
1514 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1515 {
1516 rtx dst_addr, src_addr;
1517 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1518 enum machine_mode size_mode;
1519 rtx retval;
1520
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1522
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1526
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1530
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1534 emit_queue.
1535
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1541
1542 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1543 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1544
1545 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1546 src_addr = convert_memory_address (ptr_mode, src_addr);
1547
1548 dst_tree = make_tree (ptr_type_node, dst_addr);
1549 src_tree = make_tree (ptr_type_node, src_addr);
1550
1551 if (TARGET_MEM_FUNCTIONS)
1552 size_mode = TYPE_MODE (sizetype);
1553 else
1554 size_mode = TYPE_MODE (unsigned_type_node);
1555
1556 size = convert_to_mode (size_mode, size, 1);
1557 size = copy_to_mode_reg (size_mode, size);
1558
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1564
1565 For convenience, we generate the call to bcopy this way as well. */
1566
1567 if (TARGET_MEM_FUNCTIONS)
1568 size_tree = make_tree (sizetype, size);
1569 else
1570 size_tree = make_tree (unsigned_type_node, size);
1571
1572 fn = emit_block_move_libcall_fn (true);
1573 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1574 if (TARGET_MEM_FUNCTIONS)
1575 {
1576 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1577 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1578 }
1579 else
1580 {
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1582 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1583 }
1584
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1587 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1588 call_expr, arg_list, NULL_TREE);
1589
1590 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1591
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and take nasty
1595 decisions. */
1596 if (RTX_UNCHANGING_P (dst))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1599 gen_rtx_CLOBBER (VOIDmode, dst),
1600 NULL_RTX));
1601
1602 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1603 }
1604
1605 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1606 for the function we use for block copies. The first time FOR_CALL
1607 is true, we call assemble_external. */
1608
1609 static GTY(()) tree block_move_fn;
1610
1611 void
1612 init_block_move_fn (const char *asmspec)
1613 {
1614 if (!block_move_fn)
1615 {
1616 tree args, fn;
1617
1618 if (TARGET_MEM_FUNCTIONS)
1619 {
1620 fn = get_identifier ("memcpy");
1621 args = build_function_type_list (ptr_type_node, ptr_type_node,
1622 const_ptr_type_node, sizetype,
1623 NULL_TREE);
1624 }
1625 else
1626 {
1627 fn = get_identifier ("bcopy");
1628 args = build_function_type_list (void_type_node, const_ptr_type_node,
1629 ptr_type_node, unsigned_type_node,
1630 NULL_TREE);
1631 }
1632
1633 fn = build_decl (FUNCTION_DECL, fn, args);
1634 DECL_EXTERNAL (fn) = 1;
1635 TREE_PUBLIC (fn) = 1;
1636 DECL_ARTIFICIAL (fn) = 1;
1637 TREE_NOTHROW (fn) = 1;
1638
1639 block_move_fn = fn;
1640 }
1641
1642 if (asmspec)
1643 {
1644 SET_DECL_RTL (block_move_fn, NULL_RTX);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1646 }
1647 }
1648
1649 static tree
1650 emit_block_move_libcall_fn (int for_call)
1651 {
1652 static bool emitted_extern;
1653
1654 if (!block_move_fn)
1655 init_block_move_fn (NULL);
1656
1657 if (for_call && !emitted_extern)
1658 {
1659 emitted_extern = true;
1660 make_decl_rtl (block_move_fn, NULL);
1661 assemble_external (block_move_fn);
1662 }
1663
1664 return block_move_fn;
1665 }
1666
1667 /* A subroutine of emit_block_move. Copy the data via an explicit
1668 loop. This is used only when libcalls are forbidden. */
1669 /* ??? It'd be nice to copy in hunks larger than QImode. */
1670
1671 static void
1672 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1673 unsigned int align ATTRIBUTE_UNUSED)
1674 {
1675 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1676 enum machine_mode iter_mode;
1677
1678 iter_mode = GET_MODE (size);
1679 if (iter_mode == VOIDmode)
1680 iter_mode = word_mode;
1681
1682 top_label = gen_label_rtx ();
1683 cmp_label = gen_label_rtx ();
1684 iter = gen_reg_rtx (iter_mode);
1685
1686 emit_move_insn (iter, const0_rtx);
1687
1688 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1689 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1690 do_pending_stack_adjust ();
1691
1692 emit_note (NOTE_INSN_LOOP_BEG);
1693
1694 emit_jump (cmp_label);
1695 emit_label (top_label);
1696
1697 tmp = convert_modes (Pmode, iter_mode, iter, true);
1698 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1699 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1700 x = change_address (x, QImode, x_addr);
1701 y = change_address (y, QImode, y_addr);
1702
1703 emit_move_insn (x, y);
1704
1705 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1706 true, OPTAB_LIB_WIDEN);
1707 if (tmp != iter)
1708 emit_move_insn (iter, tmp);
1709
1710 emit_note (NOTE_INSN_LOOP_CONT);
1711 emit_label (cmp_label);
1712
1713 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1714 true, top_label);
1715
1716 emit_note (NOTE_INSN_LOOP_END);
1717 }
1718 \f
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722 void
1723 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1724 {
1725 int i;
1726 #ifdef HAVE_load_multiple
1727 rtx pat;
1728 rtx last;
1729 #endif
1730
1731 if (nregs == 0)
1732 return;
1733
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1736
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple)
1740 {
1741 last = get_last_insn ();
1742 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1743 GEN_INT (nregs));
1744 if (pat)
1745 {
1746 emit_insn (pat);
1747 return;
1748 }
1749 else
1750 delete_insns_since (last);
1751 }
1752 #endif
1753
1754 for (i = 0; i < nregs; i++)
1755 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1756 operand_subword_force (x, i, mode));
1757 }
1758
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1761
1762 void
1763 move_block_from_reg (int regno, rtx x, int nregs)
1764 {
1765 int i;
1766
1767 if (nregs == 0)
1768 return;
1769
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple)
1773 {
1774 rtx last = get_last_insn ();
1775 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1776 GEN_INT (nregs));
1777 if (pat)
1778 {
1779 emit_insn (pat);
1780 return;
1781 }
1782 else
1783 delete_insns_since (last);
1784 }
1785 #endif
1786
1787 for (i = 0; i < nregs; i++)
1788 {
1789 rtx tem = operand_subword (x, i, 1, BLKmode);
1790
1791 if (tem == 0)
1792 abort ();
1793
1794 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1795 }
1796 }
1797
1798 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
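/* For example, if ORIG describes a DImode value in two hard registers,

       (parallel [(expr_list (reg:SI 3) (const_int 0))
                  (expr_list (reg:SI 4) (const_int 4))])

   the clone has the same shape, with fresh SImode pseudos in place of
   registers 3 and 4. (The register numbers here are illustrative.) */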
1803
1804 rtx
1805 gen_group_rtx (rtx orig)
1806 {
1807 int i, length;
1808 rtx *tmps;
1809
1810 if (GET_CODE (orig) != PARALLEL)
1811 abort ();
1812
1813 length = XVECLEN (orig, 0);
1814 tmps = alloca (sizeof (rtx) * length);
1815
1816 /* Skip a NULL entry in the first slot. */
1817 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818
1819 if (i)
1820 tmps[0] = 0;
1821
1822 for (; i < length; i++)
1823 {
1824 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1825 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826
1827 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1828 }
1829
1830 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1831 }
1832
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1836 if not known. */
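/* For example, an 8-byte structure being loaded into two SImode
   registers might be described by a DST such as

       (parallel [(expr_list (reg:SI 100) (const_int 0))
                  (expr_list (reg:SI 101) (const_int 4))])

   in which case bytes 0-3 and 4-7 of ORIG_SRC are extracted
   separately. (Register numbers are illustrative.) */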
1837
1838 void
1839 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1840 {
1841 rtx *tmps, src;
1842 int start, i;
1843
1844 if (GET_CODE (dst) != PARALLEL)
1845 abort ();
1846
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst, 0, 0), 0))
1850 start = 0;
1851 else
1852 start = 1;
1853
1854 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855
1856 /* Process the pieces. */
1857 for (i = start; i < XVECLEN (dst, 0); i++)
1858 {
1859 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1860 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1861 unsigned int bytelen = GET_MODE_SIZE (mode);
1862 int shift = 0;
1863
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 {
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1869 if (
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1872 == (BYTES_BIG_ENDIAN ? upward : downward)
1873 #else
1874 BYTES_BIG_ENDIAN
1875 #endif
1876 )
1877 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1878 bytelen = ssize - bytepos;
1879 if (bytelen <= 0)
1880 abort ();
1881 }
1882
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1886 src = orig_src;
1887 if (GET_CODE (orig_src) != MEM
1888 && (!CONSTANT_P (orig_src)
1889 || (GET_MODE (orig_src) != mode
1890 && GET_MODE (orig_src) != VOIDmode)))
1891 {
1892 if (GET_MODE (orig_src) == VOIDmode)
1893 src = gen_reg_rtx (mode);
1894 else
1895 src = gen_reg_rtx (GET_MODE (orig_src));
1896
1897 emit_move_insn (src, orig_src);
1898 }
1899
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1903 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1904 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1905 && bytelen == GET_MODE_SIZE (mode))
1906 {
1907 tmps[i] = gen_reg_rtx (mode);
1908 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 }
1910 else if (GET_CODE (src) == CONCAT)
1911 {
1912 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1913 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914
1915 if ((bytepos == 0 && bytelen == slen0)
1916 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 {
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1921 to be extracted. */
1922 tmps[i] = XEXP (src, bytepos / slen0);
1923 if (! CONSTANT_P (tmps[i])
1924 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1925 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1926 (bytepos % slen0) * BITS_PER_UNIT,
1927 1, NULL_RTX, mode, mode, ssize);
1928 }
1929 else if (bytepos == 0)
1930 {
1931 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1932 emit_move_insn (mem, src);
1933 tmps[i] = adjust_address (mem, mode, 0);
1934 }
1935 else
1936 abort ();
1937 }
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst))
1942 && GET_CODE (src) == REG)
1943 {
1944 int slen = GET_MODE_SIZE (GET_MODE (src));
1945 rtx mem;
1946
1947 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1948 emit_move_insn (mem, src);
1949 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 }
1951 else if (CONSTANT_P (src)
1952 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1953 tmps[i] = src;
1954 else
1955 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, ssize);
1958
1959 if (shift)
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
1962 }
1963
1964 emit_queue ();
1965
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1969 }
1970
1971 /* Emit code to move a block SRC to block DST, where SRC and DST are
1972 non-consecutive groups of registers, each represented by a PARALLEL. */
1973
1974 void
1975 emit_group_move (rtx dst, rtx src)
1976 {
1977 int i;
1978
1979 if (GET_CODE (src) != PARALLEL
1980 || GET_CODE (dst) != PARALLEL
1981 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1982 abort ();
1983
1984 /* Skip first entry if NULL. */
1985 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1987 XEXP (XVECEXP (src, 0, i), 0));
1988 }
1989
1990 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1993 known. */
1994
1995 void
1996 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1997 {
1998 rtx *tmps, dst;
1999 int start, i;
2000
2001 if (GET_CODE (src) != PARALLEL)
2002 abort ();
2003
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2007 start = 0;
2008 else
2009 start = 1;
2010
2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2012
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
2015 {
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2019 }
2020 emit_queue ();
2021
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2024 dst = orig_dst;
2025 if (GET_CODE (dst) == PARALLEL)
2026 {
2027 rtx temp;
2028
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2033 return;
2034
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2037 the temporary. */
2038
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
2042 return;
2043 }
2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2045 {
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2049 }
2050
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2053 {
2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2055 enum machine_mode mode = GET_MODE (tmps[i]);
2056 unsigned int bytelen = GET_MODE_SIZE (mode);
2057 rtx dest = dst;
2058
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2061 {
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2064 if (
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2068 #else
2069 BYTES_BIG_ENDIAN
2070 #endif
2071 )
2072 {
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2076 }
2077 bytelen = ssize - bytepos;
2078 }
2079
2080 if (GET_CODE (dst) == CONCAT)
2081 {
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 {
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2088 }
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2090 {
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2094 tmps[i]);
2095 dst = dest;
2096 break;
2097 }
2098 else
2099 abort ();
2100 }
2101
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109 else
2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], ssize);
2112 }
2113
2114 emit_queue ();
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
2119 }
2120
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2124
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
2129
2130 rtx
2131 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2132 {
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2137
2138 if (tgtblk == 0)
2139 {
2140 tgtblk = assign_temp (build_qualified_type (type,
2141 (TYPE_QUALS (type)
2142 | TYPE_QUAL_CONST)),
2143 0, 1, 1);
2144 preserve_temp_slots (tgtblk);
2145 }
2146
2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2148 into a new pseudo which is a full word. */
2149
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2157
2158 In most ABIs, the structure will be returned at the least
2159 significant end of the register, which translates to right padding
2160 on little-endian targets and left padding on big-endian targets.
2161 The opposite holds if the structure is returned at the most
2162 significant end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2165 ? !BYTES_BIG_ENDIAN
2166 : BYTES_BIG_ENDIAN))
2167 padding_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
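/* For example, with 32-bit words, a 6-byte structure returned at the
   least significant end of a big-endian register pair is left-padded,
   giving padding_correction = 32 - (6 % 4) * 8 = 16 bits to skip. */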
2169
2170 /* Copy the structure BITSIZE bits at a time.
2171
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2178 {
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == padding_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
2186
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2191
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
2200 }
2201
2202 return tgtblk;
2203 }
2204
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2207
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2210 {
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2214
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 }
2219
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2222
2223 void
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2225 {
2226 int i;
2227
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2230
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2233 }
2234
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238
2239 void
2240 use_group_regs (rtx *call_fusage, rtx regs)
2241 {
2242 int i;
2243
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 {
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
2253 }
2254 }
2255 \f
2256
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
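/* Callers such as the builtin expanders for memset-like functions
   typically check this before committing to store_by_pieces, and
   fall back to a library call when it returns zero. */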
2262
2263 int
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2267 {
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2274
2275 if (len == 0)
2276 return 1;
2277
2278 if (! STORE_BY_PIECES_P (len, align))
2279 return 0;
2280
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2284
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2287
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2291 {
2292 l = len;
2293 mode = VOIDmode;
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2308 {
2309 unsigned int size = GET_MODE_SIZE (mode);
2310
2311 while (l >= size)
2312 {
2313 if (reverse)
2314 offset -= size;
2315
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2319
2320 if (!reverse)
2321 offset += size;
2322
2323 l -= size;
2324 }
2325 }
2326
2327 max_size = GET_MODE_SIZE (mode);
2328 }
2329
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2333 }
2334
2335 return 1;
2336 }
2337
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
2341 ALIGN is maximum alignment we can assume.
2342 If ENDP is 0, return TO; if ENDP is 1, return memory at the end
2343 a la mempcpy; and if ENDP is 2, return memory at the end minus one
2344 byte a la stpcpy. */
2345
2346 rtx
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2350 {
2351 struct store_by_pieces data;
2352
2353 if (len == 0)
2354 {
2355 if (endp == 2)
2356 abort ();
2357 return to;
2358 }
2359
2360 if (! STORE_BY_PIECES_P (len, align))
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
2368 if (endp)
2369 {
2370 rtx to1;
2371
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2375 {
2376 if (endp == 2)
2377 {
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2383 }
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2386 }
2387 else
2388 {
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2392 }
2393 return to1;
2394 }
2395 else
2396 return data.to;
2397 }
2398
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2402
2403 static void
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2405 {
2406 struct store_by_pieces data;
2407
2408 if (len == 0)
2409 return;
2410
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2416 }
2417
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2420
2421 static rtx
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2425 {
2426 return const0_rtx;
2427 }
2428
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2433
2434 static void
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2437 {
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2442
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2448
2449 data->explicit_inc_to = 0;
2450 data->reverse
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2452 if (data->reverse)
2453 data->offset = data->len;
2454
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2460 {
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2466
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2468 {
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2472 }
2473
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2476 {
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2480 }
2481
2482 if (!data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2484 }
2485
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2489
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
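/* For instance, with sufficient alignment, storing 7 bytes when
   word_mode is SImode takes one SImode, one HImode and one QImode
   store. */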
2492
2493 while (max_size > 1)
2494 {
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2499
2500 if (mode == VOIDmode)
2501 break;
2502
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2506
2507 max_size = GET_MODE_SIZE (mode);
2508 }
2509
2510 /* The code above should have handled everything. */
2511 if (data->len != 0)
2512 abort ();
2513 }
2514
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2518
2519 static void
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2522 {
2523 unsigned int size = GET_MODE_SIZE (mode);
2524 rtx to1, cst;
2525
2526 while (data->len >= size)
2527 {
2528 if (data->reverse)
2529 data->offset -= size;
2530
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
2534 else
2535 to1 = adjust_address (data->to, mode, data->offset);
2536
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2540
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2543
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2546
2547 if (! data->reverse)
2548 data->offset += size;
2549
2550 data->len -= size;
2551 }
2552 }
2553 \f
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
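/* We try, in order: a single store of zero when OBJECT is exactly
   register-sized, clear_by_pieces for small constant sizes, the
   clrstr insn pattern, and finally a library call. */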
2556
2557 rtx
2558 clear_storage (rtx object, rtx size)
2559 {
2560 rtx retval = 0;
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
2571 {
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2574
2575 if (size == const0_rtx)
2576 ;
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2581 ;
2582 else
2583 retval = clear_storage_via_libcall (object, size);
2584 }
2585
2586 return retval;
2587 }
2588
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2591
2592 static bool
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2594 {
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2598
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2601
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2604 {
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2607
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2621 {
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
2625
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2630
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
2633 {
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
2641
2642 return false;
2643 }
2644
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2647
2648 static rtx
2649 clear_storage_via_libcall (rtx object, rtx size)
2650 {
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
2654
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2656
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2660
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2664
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
2669
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2675
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2677
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2684
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2690
2691 For convenience, we generate the call to bzero this way as well. */
2692
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2698
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2704
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2717
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2719 }
2720
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2724
2725 static GTY(()) tree block_clear_fn;
2726
2727 void
2728 init_block_clear_fn (const char *asmspec)
2729 {
2730 if (!block_clear_fn)
2731 {
2732 tree fn, args;
2733
2734 if (TARGET_MEM_FUNCTIONS)
2735 {
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2740 }
2741 else
2742 {
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2746 }
2747
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2753
2754 block_clear_fn = fn;
2755 }
2756
2757 if (asmspec)
2758 {
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2761 }
2762 }
2763
2764 static tree
2765 clear_storage_libcall_fn (int for_call)
2766 {
2767 static bool emitted_extern;
2768
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2771
2772 if (for_call && !emitted_extern)
2773 {
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2777 }
2778
2779 return block_clear_fn;
2780 }
2781 \f
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2786
2787 Return the last instruction emitted. */
2788
2789 rtx
2790 emit_move_insn (rtx x, rtx y)
2791 {
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2794 rtx last_insn, set;
2795
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2798
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2801
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2804 ;
2805 else if (CONSTANT_P (y))
2806 {
2807 if (optimize
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2811
2812 y_cst = y;
2813
2814 if (!LEGITIMATE_CONSTANT_P (y))
2815 {
2816 y = force_const_mem (mode, y);
2817
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
2823 }
2824 }
2825
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2834
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2840
2841 if (mode == BLKmode)
2842 abort ();
2843
2844 last_insn = emit_move_insn_1 (x, y);
2845
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2851
2852 return last_insn;
2853 }
2854
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2858
2859 rtx
2860 emit_move_insn_1 (rtx x, rtx y)
2861 {
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2865
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2867 abort ();
2868
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2878 {
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2881
2882 #ifdef PUSH_ROUNDING
2883 /* If we are pushing to the stack, but each part's size is not one
2884 the machine can push exactly, we must use move instructions. */
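/* For example, if PUSH_ROUNDING pads each 4-byte part to 8 bytes,
   pushing the two parts separately would leave a hole between them.
   Instead we adjust the stack once by the rounded size of the whole
   value and store the parts at adjacent offsets. */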
2885 if (stack
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2888 {
2889 rtx temp;
2890 HOST_WIDE_INT offset1, offset2;
2891
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897 #else
2898 add_optab,
2899 #endif
2900 stack_pointer_rtx,
2901 GEN_INT
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2908
2909 #ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912 #else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916 #endif
2917
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2928 }
2929 else
2930 #endif
2931 /* If this is a stack, push the highpart first, so it
2932 will be in the argument order.
2933
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2936 if (stack)
2937 {
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2945 #else
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2950 #endif
2951 }
2952 else
2953 {
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2956
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2968 {
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973
2974 if (packed_dest_p || packed_src_p)
2975 {
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2978
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2981
2982 if (reg_mode != BLKmode)
2983 {
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2987
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
2990
2991 if (packed_dest_p)
2992 {
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2994
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2997 }
2998 else
2999 {
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3001
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3004 }
3005 }
3006 }
3007 }
3008
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3013
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3023
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3026 }
3027
3028 return get_last_insn ();
3029 }
3030
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
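/* E.g., a 4-byte condition-code mode with no move pattern of its own
   can be copied as an SImode value of the same width. */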
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3036 {
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3040
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3049
3050 if (tmode == VOIDmode)
3051 abort ();
3052
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3059
3060 if (reload_in_progress)
3061 {
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3064 {
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3067 }
3068
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3071 {
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3074 }
3075 }
3076 else
3077 {
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3080 }
3081
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3084 }
3085
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3096
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3101 {
3102 rtx last_insn = 0;
3103 rtx seq, inner;
3104 int need_clobber;
3105 int i;
3106
3107 #ifdef PUSH_ROUNDING
3108
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3112 {
3113 rtx temp;
3114 enum rtx_code code;
3115
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121 #else
3122 add_optab,
3123 #endif
3124 stack_pointer_rtx,
3125 GEN_INT
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3129
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3132
3133 code = GET_CODE (XEXP (x, 0));
3134
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3145
3146 x = change_address (x, VOIDmode, temp);
3147 }
3148 #endif
3149
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3158
3159 start_sequence ();
3160
3161 need_clobber = 0;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3165 {
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3168
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3173 {
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3176 }
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3179
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3182
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3184
3185 last_insn = emit_move_insn (xpart, ypart);
3186 }
3187
3188 seq = get_insns ();
3189 end_sequence ();
3190
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3199
3200 emit_insn (seq);
3201
3202 return last_insn;
3203 }
3204 else
3205 abort ();
3206 }
3207
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
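/* For example, the DFmode constant 1.0 is exactly representable in
   SFmode, so on a target whose extendsfdf2 pattern accepts a constant
   or memory operand we can load it as an SFmode constant and extend. */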
3211
3212 static rtx
3213 compress_float_constant (rtx x, rtx y)
3214 {
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3219
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3221
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3225 {
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3228
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3233
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3237
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3239
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3241 {
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3246 }
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3251
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3254
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
3257
3258 return last_insn;
3259 }
3260
3261 return NULL_RTX;
3262 }
3263 \f
3264 /* Pushing data onto the stack. */
3265
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3270
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
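/* E.g., on a downward-growing stack, push_block (GEN_INT (16), 4, 1)
   allocates 20 bytes and returns the address of the 16-byte block,
   which sits above the 4 bytes of padding at the low end. */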
3274
3275 rtx
3276 push_block (rtx size, int extra, int below)
3277 {
3278 rtx temp;
3279
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3286 {
3287 temp = copy_to_mode_reg (Pmode, size);
3288 if (extra != 0)
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3292 }
3293
3294 #ifndef STACK_GROWS_DOWNWARD
3295 if (0)
3296 #else
3297 if (1)
3298 #endif
3299 {
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3303 }
3304 else
3305 {
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3315 }
3316
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3318 }
3319
3320 #ifdef PUSH_ROUNDING
3321
3322 /* Emit a single push insn. */
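/* X is a value of mode MODE to be pushed, and TYPE is its C type, or
   0 if unknown. stack_pointer_delta is updated by the rounded push
   size before any RTL is emitted. */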
3323
3324 static void
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3326 {
3327 rtx dest_addr;
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3329 rtx dest;
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3332
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334 /* If there is push pattern, use it. Otherwise try old way of throwing
3335 MEM representing push operation to move expander. */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3338 {
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3344 }
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3352 {
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3355
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3366
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373 #else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378 #endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3380 }
3381 else
3382 {
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3387 #else
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391 #endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3393 }
3394
3395 dest = gen_rtx_MEM (mode, dest_addr);
3396
3397 if (type != 0)
3398 {
3399 set_mem_attributes (dest, type, 1);
3400
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3407 }
3408 emit_move_insn (dest, x);
3409 }
3410 #endif
3411
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3418
3419 ALIGN (in bits) is maximum alignment we can assume.
3420
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426 If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
3429
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3432
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436 argument block has not been preallocated.
3437
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3439
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
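/* For example, with PARTIAL == 2 and UNITS_PER_WORD == 4, the first
   eight bytes of X are copied into REG and REG + 1 and only the
   remainder of X is pushed on the stack. */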
3443
3444 void
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
3449 {
3450 rtx xinner;
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454 #else
3455 = upward;
3456 #endif
3457
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3462
3463 /* Invert direction if stack is post-decrement.
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3468
3469 xinner = x = protect_from_queue (x, 0);
3470
3471 if (mode == BLKmode)
3472 {
3473 /* Copy a block into the stack, entirely or partially. */
3474
3475 rtx temp;
3476 int used = partial * UNITS_PER_WORD;
3477 int offset;
3478 int skip;
3479
3480 if (reg && GET_CODE (reg) == PARALLEL)
3481 {
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3484 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3485 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3486 }
3487 else
3488 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3489
3490 if (size == 0)
3491 abort ();
3492
3493 used -= offset;
3494
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3497
3498 if (partial != 0)
3499 xinner = adjust_address (xinner, BLKmode, used);
3500
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip = (reg_parm_stack_space == 0) ? 0 : used;
3506
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3511 if (args_addr == 0
3512 && PUSH_ARGS
3513 && GET_CODE (size) == CONST_INT
3514 && skip == 0
3515 && MEM_ALIGN (xinner) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 forces many pushes of a small amount of data,
3519 and such small pushes do rounding that causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3521 || align >= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3523 == (align / BITS_PER_UNIT)))
3524 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3525 {
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3532
3533 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3534 }
3535 else
3536 #endif /* PUSH_ROUNDING */
3537 {
3538 rtx target;
3539
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3542
3543 /* Deduct words put into registers from the size we must copy. */
3544 if (partial != 0)
3545 {
3546 if (GET_CODE (size) == CONST_INT)
3547 size = GEN_INT (INTVAL (size) - used);
3548 else
3549 size = expand_binop (GET_MODE (size), sub_optab, size,
3550 GEN_INT (used), NULL_RTX, 0,
3551 OPTAB_LIB_WIDEN);
3552 }
3553
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3557 if (! args_addr)
3558 {
3559 temp = push_block (size, extra, where_pad == downward);
3560 extra = 0;
3561 }
3562 else if (GET_CODE (args_so_far) == CONST_INT)
3563 temp = memory_address (BLKmode,
3564 plus_constant (args_addr,
3565 skip + INTVAL (args_so_far)));
3566 else
3567 temp = memory_address (BLKmode,
3568 plus_constant (gen_rtx_PLUS (Pmode,
3569 args_addr,
3570 args_so_far),
3571 skip));
3572
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3574 {
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3578
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3582 }
3583
3584 target = gen_rtx_MEM (BLKmode, temp);
3585
3586 if (type != 0)
3587 {
3588 set_mem_attributes (target, type, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target, 0);
3594 }
3595
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target, align);
3599
3600 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3601 }
3602 }
3603 else if (partial > 0)
3604 {
3605 /* Scalar partly in registers. */
3606
3607 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3608 int i;
3609 int not_stack;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3613 int args_offset = INTVAL (args_so_far);
3614 int skip;
3615
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3622
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3626 if (args_addr == 0)
3627 offset = 0;
3628
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack = partial - offset;
3632
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3638
3639 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3640 x = validize_mem (force_const_mem (mode, x));
3641
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3646 x = copy_to_reg (x);
3647
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650 has a size a multiple of a word. */
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i = not_stack; i < size; i++)
3653 #else
3654 for (i = size - 1; i >= not_stack; i--)
3655 #endif
3656 if (i >= not_stack + offset)
3657 emit_push_insn (operand_subword_force (x, i, mode),
3658 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3659 0, args_addr,
3660 GEN_INT (args_offset + ((i - not_stack + skip)
3661 * UNITS_PER_WORD)),
3662 reg_parm_stack_space, alignment_pad);
3663 }
3664 else
3665 {
3666 rtx addr;
3667 rtx dest;
3668
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra && args_addr == 0
3673 && where_pad != none && where_pad != stack_direction)
3674 anti_adjust_stack (GEN_INT (extra));
3675
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr == 0 && PUSH_ARGS)
3678 emit_single_push_insn (mode, x, type);
3679 else
3680 #endif
3681 {
3682 if (GET_CODE (args_so_far) == CONST_INT)
3683 addr
3684 = memory_address (mode,
3685 plus_constant (args_addr,
3686 INTVAL (args_so_far)));
3687 else
3688 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3689 args_so_far));
3690 dest = gen_rtx_MEM (mode, addr);
3691 if (type != 0)
3692 {
3693 set_mem_attributes (dest, type, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest, 0);
3699 }
3700
3701 emit_move_insn (dest, x);
3702 }
3703 }
3704
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial > 0 && reg != 0)
3709 {
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg) == PARALLEL)
3713 emit_group_load (reg, x, type, -1);
3714 else
3715 move_block_to_reg (REGNO (reg), x, partial, mode);
3716 }
3717
3718 if (extra && args_addr == 0 && where_pad == stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3720
3721 if (alignment_pad && args_addr == 0)
3722 anti_adjust_stack (alignment_pad);
3723 }
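/* Editor's note: a minimal standalone sketch (not GCC code) of the word
   accounting in the "scalar partly in registers" case above, showing how
   OFFSET, NOT_STACK and SKIP interact.  WORDS_PER_PARM_BOUNDARY is an
   assumed stand-in for PARM_BOUNDARY / BITS_PER_WORD; the whole block is
   under #if 0 so it is never compiled as part of this file.  */
#if 0
#include <stdio.h>

#define WORDS_PER_PARM_BOUNDARY 2   /* assumed PARM_BOUNDARY / BITS_PER_WORD */

/* Decide, word by word, whether word I of an argument of SIZE words,
   of which PARTIAL words travel in registers, must be stored on the
   stack, mirroring the loop over operand_subword_force above.  */
static void
model_partial_push (int size, int partial, int reg_parm_stack_space,
		    int have_args_addr)
{
  /* Words at the start of the argument we must make space for
     but need not store.  */
  int offset = partial % WORDS_PER_PARM_BOUNDARY;
  int not_stack, skip, i;

  /* If we make the space by pushing it, push the real data too.  */
  if (!have_args_addr)
    offset = 0;

  not_stack = partial - offset;
  skip = reg_parm_stack_space ? not_stack : 0;

  for (i = not_stack; i < size; i++)
    if (i >= not_stack + offset)
      printf ("word %d -> stack word slot %d\n", i, i - not_stack + skip);
}

int
main (void)
{
  model_partial_push (4 /* words */, 2 /* in regs */, 1, 1);
  return 0;
}
#endif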
3724 \f
3725 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3726 operations. */
3727
3728 static rtx
3729 get_subtarget (rtx x)
3730 {
3731 return ((x == 0
3732 /* Only registers can be subtargets. */
3733 || GET_CODE (x) != REG
3734 /* If the register is readonly, it can't be set more than once. */
3735 || RTX_UNCHANGING_P (x)
3736 /* Don't use hard regs to avoid extending their life. */
3737 || REGNO (x) < FIRST_PSEUDO_REGISTER
3738 /* Avoid subtargets inside loops,
3739 since they hide some invariant expressions. */
3740 || preserve_subexpressions_p ())
3741 ? 0 : x);
3742 }
3743
3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3749
3750 rtx
3751 expand_assignment (tree to, tree from, int want_value)
3752 {
3753 rtx to_rtx = 0;
3754 rtx result;
3755
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3757
3758 if (TREE_CODE (to) == ERROR_MARK)
3759 {
3760 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 return want_value ? result : NULL_RTX;
3762 }
3763
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same
3768 problem. */
3769
3770 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3771 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3773 {
3774 enum machine_mode mode1;
3775 HOST_WIDE_INT bitsize, bitpos;
3776 rtx orig_to_rtx;
3777 tree offset;
3778 int unsignedp;
3779 int volatilep = 0;
3780 tree tem;
3781
3782 push_temp_slots ();
3783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3784 &unsignedp, &volatilep);
3785
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3788
3789 if (mode1 == VOIDmode && want_value)
3790 tem = stabilize_reference (tem);
3791
3792 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3793
3794 if (offset != 0)
3795 {
3796 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3797
3798 if (GET_CODE (to_rtx) != MEM)
3799 abort ();
3800
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx) != Pmode)
3803 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3804 #else
3805 if (GET_MODE (offset_rtx) != ptr_mode)
3806 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3807 #endif
3808
3809 /* A constant address in TO_RTX can have VOIDmode, we must not try
3810 to call force_reg for that case. Avoid that case. */
3811 if (GET_CODE (to_rtx) == MEM
3812 && GET_MODE (to_rtx) == BLKmode
3813 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3814 && bitsize > 0
3815 && (bitpos % bitsize) == 0
3816 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3817 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3818 {
3819 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3820 bitpos = 0;
3821 }
3822
3823 to_rtx = offset_address (to_rtx, offset_rtx,
3824 highest_pow2_factor_for_type (TREE_TYPE (to),
3825 offset));
3826 }
3827
3828 if (GET_CODE (to_rtx) == MEM)
3829 {
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx = shallow_copy_rtx (to_rtx);
3833
3834 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3835 }
3836
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep && GET_CODE (to_rtx) == MEM)
3840 {
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_VOLATILE_P (to_rtx) = 1;
3844 }
3845
3846 if (TREE_CODE (to) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3852 {
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 RTX_UNCHANGING_P (to_rtx) = 1;
3856 }
3857
3858 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3859 {
3860 if (to_rtx == orig_to_rtx)
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 }
3864
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 (want_value
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3870 : VOIDmode),
3871 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3872
3873 preserve_temp_slots (result);
3874 free_temp_slots ();
3875 pop_temp_slots ();
3876
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3881 result,
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3883 : NULL_RTX);
3884 }
3885
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3891
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
3896 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3898 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3899 && GET_CODE (DECL_RTL (to)) == REG))
3900 {
3901 rtx value;
3902
3903 push_temp_slots ();
3904 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3905 if (to_rtx == 0)
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx) == PARALLEL)
3911 emit_group_load (to_rtx, value, TREE_TYPE (from),
3912 int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3915 else
3916 {
3917 if (POINTER_TYPE_P (TREE_TYPE (to)))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3920 }
3921 preserve_temp_slots (to_rtx);
3922 free_temp_slots ();
3923 pop_temp_slots ();
3924 return want_value ? to_rtx : NULL_RTX;
3925 }
3926
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3929
3930 if (to_rtx == 0)
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3932
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3936 {
3937 rtx temp;
3938
3939 push_temp_slots ();
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3941
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3944 int_size_in_bytes (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3947
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3952 }
3953
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3960 {
3961 rtx from_rtx, size;
3962
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3966
3967 if (TARGET_MEM_FUNCTIONS)
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3974 else
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3979 size,
3980 TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3982
3983 preserve_temp_slots (to_rtx);
3984 free_temp_slots ();
3985 pop_temp_slots ();
3986 return want_value ? to_rtx : NULL_RTX;
3987 }
3988
3989 /* Compute FROM and store the value in the rtx we got. */
3990
3991 push_temp_slots ();
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? result : NULL_RTX;
3997 }
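/* Editor's note: the structure-return path above goes through memmove
   (or bcopy) rather than memcpy precisely because the source and the
   return block may overlap.  A minimal standalone demonstration of why
   that matters; nothing here is GCC-specific.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16] = "abcdefgh";

  /* Overlapping copy: shift "abcdef" right by two within BUF.  memcpy
     would be undefined behavior here; memmove must yield "ababcdef".  */
  memmove (buf + 2, buf, 6);
  printf ("%s\n", buf);   /* prints "ababcdef" */
  return 0;
}
#endif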
3998
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4002
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4010
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,

4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4017
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4021
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
4024
4025 rtx
4026 store_expr (tree exp, rtx target, int want_value)
4027 {
4028 rtx temp;
4029 int dont_return_target = 0;
4030 int dont_store_target = 0;
4031
4032 if (VOID_TYPE_P (TREE_TYPE (exp)))
4033 {
4034 /* C++ can generate ?: expressions with a throw expression in one
4035 branch and an rvalue in the other. Here, we resolve attempts to
4036 store the throw expression's nonexistent result. */
4037 if (want_value)
4038 abort ();
4039 expand_expr (exp, const0_rtx, VOIDmode, 0);
4040 return NULL_RTX;
4041 }
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4043 {
4044 /* Perform first part of compound expression, then assign from second
4045 part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4047 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4048 emit_queue ();
4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 }
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 {
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4057
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4062
4063 do_pending_stack_adjust ();
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4068 end_cleanup_deferral ();
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4075 end_cleanup_deferral ();
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
4079
4080 return want_value & 1 ? target : NULL_RTX;
4081 }
4082 else if (queued_subexp_p (target))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4085 {
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 {
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4093 }
4094 else
4095 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4096 (want_value & 2
4097 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4098
4099 /* If target is volatile, ANSI requires accessing the value
4100 *from* the target, if it is accessed. So make that happen.
4101 In no case return the target itself. */
4102 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4103 dont_return_target = 1;
4104 }
4105 else if ((want_value & 1) != 0
4106 && GET_CODE (target) == MEM
4107 && ! MEM_VOLATILE_P (target)
4108 && GET_MODE (target) != BLKmode)
4109 /* If target is in memory and caller wants value in a register instead,
4110 arrange that. Pass TARGET as target for expand_expr so that,
4111 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4112 We know expand_expr will not use the target in that case.
4113 Don't do this if TARGET is volatile because we are supposed
4114 to write it and then read it. */
4115 {
4116 temp = expand_expr (exp, target, GET_MODE (target),
4117 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4118 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4119 {
4120 /* If TEMP is already in the desired TARGET, only copy it from
4121 memory and don't store it there again. */
4122 if (temp == target
4123 || (rtx_equal_p (temp, target)
4124 && ! side_effects_p (temp) && ! side_effects_p (target)))
4125 dont_store_target = 1;
4126 temp = copy_to_reg (temp);
4127 }
4128 dont_return_target = 1;
4129 }
4130 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4134 expression. */
4135 {
4136 rtx inner_target = 0;
4137
4138 /* If we don't want a value, we can do the conversion inside EXP,
4139 which will often result in some optimizations. Do the conversion
4140 in two steps: first change the signedness, if needed, then
4141 the extend. But don't do this if the type of EXP is a subtype
4142 of something else since then the conversion might involve
4143 more than just converting modes. */
4144 if ((want_value & 1) == 0
4145 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4146 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4147 {
4148 if (TREE_UNSIGNED (TREE_TYPE (exp))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target))
4150 exp = convert
4151 ((*lang_hooks.types.signed_or_unsigned_type)
4152 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4153
4154 exp = convert ((*lang_hooks.types.type_for_mode)
4155 (GET_MODE (SUBREG_REG (target)),
4156 SUBREG_PROMOTED_UNSIGNED_P (target)),
4157 exp);
4158
4159 inner_target = SUBREG_REG (target);
4160 }
4161
4162 temp = expand_expr (exp, inner_target, VOIDmode,
4163 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4164
4165 /* If TEMP is a MEM and we want a result value, make the access
4166 now so it gets done only once. Strictly speaking, this is
4167 only necessary if the MEM is volatile, or if the address
4168 overlaps TARGET. But not performing the load twice also
4169 reduces the amount of rtl we generate and then have to CSE. */
4170 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4171 temp = copy_to_reg (temp);
4172
4173 /* If TEMP is a VOIDmode constant, use convert_modes to make
4174 sure that we properly convert it. */
4175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4176 {
4177 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4179 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4180 GET_MODE (target), temp,
4181 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 }
4183
4184 convert_move (SUBREG_REG (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186
4187 /* If we promoted a constant, change the mode back down to match
4188 target. Otherwise, the caller might get confused by a result whose
4189 mode is larger than expected. */
4190
4191 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4192 {
4193 if (GET_MODE (temp) != VOIDmode)
4194 {
4195 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4196 SUBREG_PROMOTED_VAR_P (temp) = 1;
4197 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4198 SUBREG_PROMOTED_UNSIGNED_P (target));
4199 }
4200 else
4201 temp = convert_modes (GET_MODE (target),
4202 GET_MODE (SUBREG_REG (target)),
4203 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4204 }
4205
4206 return want_value & 1 ? temp : NULL_RTX;
4207 }
4208 else
4209 {
4210 temp = expand_expr (exp, target, GET_MODE (target),
4211 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4215
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && GET_CODE (target) == REG
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4225 }
4226
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4236
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary.
4239 If TEMP and TARGET compare equal according to rtx_equal_p, but
4240 one or both of them are volatile memory refs, we have to distinguish
4241 two cases:
4242 - expand_expr has used TARGET. In this case, we must not generate
4243 another copy. This can be detected by TARGET being equal according
4244 to == .
4245 - expand_expr has not used TARGET - that means that the source just
4246 happens to have the same RTX form. Since temp will have been created
4247 by expand_expr, it will compare unequal according to == .
4248 We must generate a copy in this case, to reach the correct number
4249 of volatile memory references. */
4250
4251 if ((! rtx_equal_p (temp, target)
4252 || (temp != target && (side_effects_p (temp)
4253 || side_effects_p (target))))
4254 && TREE_CODE (exp) != ERROR_MARK
4255 && ! dont_store_target
4256 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4257 but TARGET is not a valid memory reference, TEMP will differ
4258 from TARGET although it is really the same location. */
4259 && !(GET_CODE (target) == MEM
4260 && GET_CODE (XEXP (target, 0)) != QUEUED
4261 && (!memory_address_p (GET_MODE (target), XEXP (target, 0))
4262 || (flag_force_addr && !REG_P (XEXP (target, 0)))))
4263 /* If there's nothing to copy, don't bother. Don't call expr_size
4264 unless necessary, because some front ends' (e.g. C++) expr_size hook
4265 aborts on objects that are not supposed to be bit-copied or
4266 bit-initialized. */
4267 && expr_size (exp) != const0_rtx)
4268 {
4269 target = protect_from_queue (target, 1);
4270 if (GET_MODE (temp) != GET_MODE (target)
4271 && GET_MODE (temp) != VOIDmode)
4272 {
4273 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4274 if (dont_return_target)
4275 {
4276 /* In this case, we will return TEMP,
4277 so make sure it has the proper mode.
4278 But don't forget to store the value into TARGET. */
4279 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4280 emit_move_insn (target, temp);
4281 }
4282 else
4283 convert_move (target, temp, unsignedp);
4284 }
4285
4286 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4287 {
4288 /* Handle copying a string constant into an array. The string
4289 constant may be shorter than the array. So copy just the string's
4290 actual length, and clear the rest. First get the size of the data
4291 type of the string, which is actually the size of the target. */
4292 rtx size = expr_size (exp);
4293
4294 if (GET_CODE (size) == CONST_INT
4295 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4296 emit_block_move (target, temp, size,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4299 else
4300 {
4301 /* Compute the size of the data to copy from the string. */
4302 tree copy_size
4303 = size_binop (MIN_EXPR,
4304 make_tree (sizetype, size),
4305 size_int (TREE_STRING_LENGTH (exp)));
4306 rtx copy_size_rtx
4307 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4308 (want_value & 2
4309 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4310 rtx label = 0;
4311
4312 /* Copy that much. */
4313 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4314 TREE_UNSIGNED (sizetype));
4315 emit_block_move (target, temp, copy_size_rtx,
4316 (want_value & 2
4317 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4318
4319 /* Figure out how much is left in TARGET that we have to clear.
4320 Do all calculations in ptr_mode. */
4321 if (GET_CODE (copy_size_rtx) == CONST_INT)
4322 {
4323 size = plus_constant (size, -INTVAL (copy_size_rtx));
4324 target = adjust_address (target, BLKmode,
4325 INTVAL (copy_size_rtx));
4326 }
4327 else
4328 {
4329 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4330 copy_size_rtx, NULL_RTX, 0,
4331 OPTAB_LIB_WIDEN);
4332
4333 #ifdef POINTERS_EXTEND_UNSIGNED
4334 if (GET_MODE (copy_size_rtx) != Pmode)
4335 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4336 TREE_UNSIGNED (sizetype));
4337 #endif
4338
4339 target = offset_address (target, copy_size_rtx,
4340 highest_pow2_factor (copy_size));
4341 label = gen_label_rtx ();
4342 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4343 GET_MODE (size), 0, label);
4344 }
4345
4346 if (size != const0_rtx)
4347 clear_storage (target, size);
4348
4349 if (label)
4350 emit_label (label);
4351 }
4352 }
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
4356 emit_group_load (target, temp, TREE_TYPE (exp),
4357 int_size_in_bytes (TREE_TYPE (exp)));
4358 else if (GET_MODE (temp) == BLKmode)
4359 emit_block_move (target, temp, expr_size (exp),
4360 (want_value & 2
4361 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4362 else
4363 emit_move_insn (target, temp);
4364 }
4365
4366 /* If we don't want a value, return NULL_RTX. */
4367 if ((want_value & 1) == 0)
4368 return NULL_RTX;
4369
4370 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4371 ??? The latter test doesn't seem to make sense. */
4372 else if (dont_return_target && GET_CODE (temp) != MEM)
4373 return temp;
4374
4375 /* Return TARGET itself if it is a hard register. */
4376 else if ((want_value & 1) != 0
4377 && GET_MODE (target) != BLKmode
4378 && ! (GET_CODE (target) == REG
4379 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4380 return copy_to_reg (target);
4381
4382 else
4383 return target;
4384 }
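/* Editor's note: a standalone sketch of the STRING_CST handling above:
   when a string constant is shorter than the array it initializes, copy
   only the string's actual length and clear the remainder.  The helper
   name is illustrative only; GCC does the equivalent on RTL with
   emit_block_move and clear_storage, not with the C library.  */
#if 0
#include <stdio.h>
#include <string.h>

/* Initialize TARGET (TARGET_SIZE bytes) from STR (STR_LEN bytes,
   including any terminating NUL the front end counted).  */
static void
store_string_into_array (char *target, size_t target_size,
			 const char *str, size_t str_len)
{
  /* Copy that much...  */
  size_t copy = str_len < target_size ? str_len : target_size;
  memcpy (target, str, copy);

  /* ... then figure out how much is left and clear it.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}

int
main (void)
{
  char arr[8];
  store_string_into_array (arr, sizeof arr, "hi", 3);
  printf ("%s (trailing bytes cleared)\n", arr);
  return 0;
}
#endif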
4385 \f
4386 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4387
4388 static int
4389 is_zeros_p (tree exp)
4390 {
4391 tree elt;
4392
4393 switch (TREE_CODE (exp))
4394 {
4395 case CONVERT_EXPR:
4396 case NOP_EXPR:
4397 case NON_LVALUE_EXPR:
4398 case VIEW_CONVERT_EXPR:
4399 return is_zeros_p (TREE_OPERAND (exp, 0));
4400
4401 case INTEGER_CST:
4402 return integer_zerop (exp);
4403
4404 case COMPLEX_CST:
4405 return
4406 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4407
4408 case REAL_CST:
4409 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4410
4411 case VECTOR_CST:
4412 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4413 elt = TREE_CHAIN (elt))
4414 if (!is_zeros_p (TREE_VALUE (elt)))
4415 return 0;
4416
4417 return 1;
4418
4419 case CONSTRUCTOR:
4420 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4421 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4422 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4423 if (! is_zeros_p (TREE_VALUE (elt)))
4424 return 0;
4425
4426 return 1;
4427
4428 default:
4429 return 0;
4430 }
4431 }
4432
4433 /* Return 1 if EXP contains mostly (3/4) zeros. */
4434
4435 int
4436 mostly_zeros_p (tree exp)
4437 {
4438 if (TREE_CODE (exp) == CONSTRUCTOR)
4439 {
4440 int elts = 0, zeros = 0;
4441 tree elt = CONSTRUCTOR_ELTS (exp);
4442 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4443 {
4444 /* If there are no ranges of true bits, it is all zero. */
4445 return elt == NULL_TREE;
4446 }
4447 for (; elt; elt = TREE_CHAIN (elt))
4448 {
4449 /* We do not handle the case where the index is a RANGE_EXPR,
4450 so the statistic will be somewhat inaccurate.
4451 We do make a more accurate count in store_constructor itself,
4452 so, since this function is only used for nested array elements,
4453 this should be close enough. */
4454 if (mostly_zeros_p (TREE_VALUE (elt)))
4455 zeros++;
4456 elts++;
4457 }
4458
4459 return 4 * zeros >= 3 * elts;
4460 }
4461
4462 return is_zeros_p (exp);
4463 }
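/* Editor's note: the `4 * zeros >= 3 * elts' test above is an
   integer-only way of asking "are at least 75% of the elements zero?"
   without division or floating point.  A standalone restatement:  */
#if 0
#include <assert.h>

static int
at_least_three_quarters (int zeros, int elts)
{
  /* zeros / elts >= 3 / 4, cross-multiplied to stay in integers.  */
  return 4 * zeros >= 3 * elts;
}

int
main (void)
{
  assert (at_least_three_quarters (3, 4));    /* exactly 75% */
  assert (!at_least_three_quarters (2, 4));   /* only 50% */
  assert (at_least_three_quarters (0, 0));    /* empty: trivially true */
  return 0;
}
#endif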
4464 \f
4465 /* Helper function for store_constructor.
4466 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4467 TYPE is the type of the CONSTRUCTOR, not the element type.
4468 CLEARED is as for store_constructor.
4469 ALIAS_SET is the alias set to use for any stores.
4470
4471 This provides a recursive shortcut back to store_constructor when it isn't
4472 necessary to go through store_field. This is so that we can pass through
4473 the cleared field to let store_constructor know that we may not have to
4474 clear a substructure if the outer structure has already been cleared. */
4475
4476 static void
4477 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4478 HOST_WIDE_INT bitpos, enum machine_mode mode,
4479 tree exp, tree type, int cleared, int alias_set)
4480 {
4481 if (TREE_CODE (exp) == CONSTRUCTOR
4482 && bitpos % BITS_PER_UNIT == 0
4483 /* If we have a nonzero bitpos for a register target, then we just
4484 let store_field do the bitfield handling. This is unlikely to
4485 generate unnecessary clear instructions anyways. */
4486 && (bitpos == 0 || GET_CODE (target) == MEM))
4487 {
4488 if (GET_CODE (target) == MEM)
4489 target
4490 = adjust_address (target,
4491 GET_MODE (target) == BLKmode
4492 || 0 != (bitpos
4493 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4494 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4495
4496
4497 /* Update the alias set, if required. */
4498 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4499 && MEM_ALIAS_SET (target) != 0)
4500 {
4501 target = copy_rtx (target);
4502 set_mem_alias_set (target, alias_set);
4503 }
4504
4505 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4506 }
4507 else
4508 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4509 alias_set);
4510 }
4511
4512 /* Store the value of constructor EXP into the rtx TARGET.
4513 TARGET is either a REG or a MEM; we know it cannot conflict, since
4514 safe_from_p has been called.
4515 CLEARED is true if TARGET is known to have been zero'd.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
4519
4520 static void
4521 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4522 {
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4526 #endif
4527
4528 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4529 || TREE_CODE (type) == QUAL_UNION_TYPE)
4530 {
4531 tree elt;
4532
4533 /* If size is zero or the target is already cleared, do nothing. */
4534 if (size == 0 || cleared)
4535 cleared = 1;
4536 /* We either clear the aggregate or indicate the value is dead. */
4537 else if ((TREE_CODE (type) == UNION_TYPE
4538 || TREE_CODE (type) == QUAL_UNION_TYPE)
4539 && ! CONSTRUCTOR_ELTS (exp))
4540 /* If the constructor is empty, clear the union. */
4541 {
4542 clear_storage (target, expr_size (exp));
4543 cleared = 1;
4544 }
4545
4546 /* If we are building a static constructor into a register,
4547 set the initial value as zero so we can fold the value into
4548 a constant. But if more than one register is involved,
4549 this probably loses. */
4550 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4551 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4552 {
4553 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4554 cleared = 1;
4555 }
4556
4557 /* If the constructor has fewer fields than the structure
4558 or if we are initializing the structure to mostly zeros,
4559 clear the whole structure first. Don't do this if TARGET is a
4560 register whose mode size isn't equal to SIZE since clear_storage
4561 can't handle this case. */
4562 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4566 == size)))
4567 {
4568 rtx xtarget = target;
4569
4570 if (readonly_fields_p (type))
4571 {
4572 xtarget = copy_rtx (xtarget);
4573 RTX_UNCHANGING_P (xtarget) = 1;
4574 }
4575
4576 clear_storage (xtarget, GEN_INT (size));
4577 cleared = 1;
4578 }
4579
4580 if (! cleared)
4581 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4582
4583 /* Store each element of the constructor into
4584 the corresponding field of TARGET. */
4585
4586 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4587 {
4588 tree field = TREE_PURPOSE (elt);
4589 tree value = TREE_VALUE (elt);
4590 enum machine_mode mode;
4591 HOST_WIDE_INT bitsize;
4592 HOST_WIDE_INT bitpos = 0;
4593 tree offset;
4594 rtx to_rtx = target;
4595
4596 /* Just ignore missing fields.
4597 We cleared the whole structure, above,
4598 if any fields are missing. */
4599 if (field == 0)
4600 continue;
4601
4602 if (cleared && is_zeros_p (value))
4603 continue;
4604
4605 if (host_integerp (DECL_SIZE (field), 1))
4606 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4607 else
4608 bitsize = -1;
4609
4610 mode = DECL_MODE (field);
4611 if (DECL_BIT_FIELD (field))
4612 mode = VOIDmode;
4613
4614 offset = DECL_FIELD_OFFSET (field);
4615 if (host_integerp (offset, 0)
4616 && host_integerp (bit_position (field), 0))
4617 {
4618 bitpos = int_bit_position (field);
4619 offset = 0;
4620 }
4621 else
4622 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4623
4624 if (offset)
4625 {
4626 rtx offset_rtx;
4627
4628 if (CONTAINS_PLACEHOLDER_P (offset))
4629 offset = build (WITH_RECORD_EXPR, sizetype,
4630 offset, make_tree (TREE_TYPE (exp), target));
4631
4632 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4633 if (GET_CODE (to_rtx) != MEM)
4634 abort ();
4635
4636 #ifdef POINTERS_EXTEND_UNSIGNED
4637 if (GET_MODE (offset_rtx) != Pmode)
4638 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4639 #else
4640 if (GET_MODE (offset_rtx) != ptr_mode)
4641 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4642 #endif
4643
4644 to_rtx = offset_address (to_rtx, offset_rtx,
4645 highest_pow2_factor (offset));
4646 }
4647
4648 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4649 on the MEM might lead to scheduling the clearing after the
4650 store. */
4651 if (TREE_READONLY (field) && !cleared)
4652 {
4653 if (GET_CODE (to_rtx) == MEM)
4654 to_rtx = copy_rtx (to_rtx);
4655
4656 RTX_UNCHANGING_P (to_rtx) = 1;
4657 }
4658
4659 #ifdef WORD_REGISTER_OPERATIONS
4660 /* If this initializes a field that is smaller than a word, at the
4661 start of a word, try to widen it to a full word.
4662 This special case allows us to output C++ member function
4663 initializations in a form that the optimizers can understand. */
4664 if (GET_CODE (target) == REG
4665 && bitsize < BITS_PER_WORD
4666 && bitpos % BITS_PER_WORD == 0
4667 && GET_MODE_CLASS (mode) == MODE_INT
4668 && TREE_CODE (value) == INTEGER_CST
4669 && exp_size >= 0
4670 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4671 {
4672 tree type = TREE_TYPE (value);
4673
4674 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4675 {
4676 type = (*lang_hooks.types.type_for_size)
4677 (BITS_PER_WORD, TREE_UNSIGNED (type));
4678 value = convert (type, value);
4679 }
4680
4681 if (BYTES_BIG_ENDIAN)
4682 value
4683 = fold (build (LSHIFT_EXPR, type, value,
4684 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4685 bitsize = BITS_PER_WORD;
4686 mode = word_mode;
4687 }
4688 #endif
4689
4690 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4691 && DECL_NONADDRESSABLE_P (field))
4692 {
4693 to_rtx = copy_rtx (to_rtx);
4694 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4695 }
4696
4697 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4698 value, type, cleared,
4699 get_alias_set (TREE_TYPE (field)));
4700 }
4701 }
4702 else if (TREE_CODE (type) == ARRAY_TYPE
4703 || TREE_CODE (type) == VECTOR_TYPE)
4704 {
4705 tree elt;
4706 int i;
4707 int need_to_clear;
4708 tree domain = TYPE_DOMAIN (type);
4709 tree elttype = TREE_TYPE (type);
4710 int const_bounds_p;
4711 HOST_WIDE_INT minelt = 0;
4712 HOST_WIDE_INT maxelt = 0;
4713 int icode = 0;
4714 rtx *vector = NULL;
4715 int elt_size = 0;
4716 unsigned n_elts = 0;
4717
4718 /* Vectors are like arrays, but the domain is stored via an array
4719 type indirectly. */
4720 if (TREE_CODE (type) == VECTOR_TYPE)
4721 {
4722 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4723 the same field as TYPE_DOMAIN, we are not guaranteed that
4724 it always will. */
4725 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4726 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4727 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4728 {
4729 enum machine_mode mode = GET_MODE (target);
4730
4731 icode = (int) vec_init_optab->handlers[mode].insn_code;
4732 if (icode != CODE_FOR_nothing)
4733 {
4734 unsigned int i;
4735
4736 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4737 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4738 vector = alloca (n_elts * sizeof (rtx));
4739 for (i = 0; i < n_elts; i++)
4740 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4741 }
4742 }
4743 }
4744
4745 const_bounds_p = (TYPE_MIN_VALUE (domain)
4746 && TYPE_MAX_VALUE (domain)
4747 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4748 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4749
4750 /* If we have constant bounds for the range of the type, get them. */
4751 if (const_bounds_p)
4752 {
4753 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4754 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4755 }
4756
4757 /* If the constructor has fewer elements than the array,
4758 clear the whole array first. Similarly if this is
4759 static constructor of a non-BLKmode object. */
4760 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4761 need_to_clear = 1;
4762 else
4763 {
4764 HOST_WIDE_INT count = 0, zero_count = 0;
4765 need_to_clear = ! const_bounds_p;
4766
4767 /* This loop is a more accurate version of the loop in
4768 mostly_zeros_p (it handles RANGE_EXPR in an index).
4769 It is also needed to check for missing elements. */
4770 for (elt = CONSTRUCTOR_ELTS (exp);
4771 elt != NULL_TREE && ! need_to_clear;
4772 elt = TREE_CHAIN (elt))
4773 {
4774 tree index = TREE_PURPOSE (elt);
4775 HOST_WIDE_INT this_node_count;
4776
4777 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4778 {
4779 tree lo_index = TREE_OPERAND (index, 0);
4780 tree hi_index = TREE_OPERAND (index, 1);
4781
4782 if (! host_integerp (lo_index, 1)
4783 || ! host_integerp (hi_index, 1))
4784 {
4785 need_to_clear = 1;
4786 break;
4787 }
4788
4789 this_node_count = (tree_low_cst (hi_index, 1)
4790 - tree_low_cst (lo_index, 1) + 1);
4791 }
4792 else
4793 this_node_count = 1;
4794
4795 count += this_node_count;
4796 if (mostly_zeros_p (TREE_VALUE (elt)))
4797 zero_count += this_node_count;
4798 }
4799
4800 /* Clear the entire array first if there are any missing elements,
4801 or if the incidence of zero elements is >= 75%. */
4802 if (! need_to_clear
4803 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4804 need_to_clear = 1;
4805 }
4806
4807 if (need_to_clear && size > 0 && !vector)
4808 {
4809 if (! cleared)
4810 {
4811 if (REG_P (target))
4812 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4813 else
4814 clear_storage (target, GEN_INT (size));
4815 }
4816 cleared = 1;
4817 }
4818 else if (REG_P (target))
4819 /* Inform later passes that the old value is dead. */
4820 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4821
4822 /* Store each element of the constructor into
4823 the corresponding element of TARGET, determined
4824 by counting the elements. */
4825 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4826 elt;
4827 elt = TREE_CHAIN (elt), i++)
4828 {
4829 enum machine_mode mode;
4830 HOST_WIDE_INT bitsize;
4831 HOST_WIDE_INT bitpos;
4832 int unsignedp;
4833 tree value = TREE_VALUE (elt);
4834 tree index = TREE_PURPOSE (elt);
4835 rtx xtarget = target;
4836
4837 if (cleared && is_zeros_p (value))
4838 continue;
4839
4840 unsignedp = TREE_UNSIGNED (elttype);
4841 mode = TYPE_MODE (elttype);
4842 if (mode == BLKmode)
4843 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4844 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4845 : -1);
4846 else
4847 bitsize = GET_MODE_BITSIZE (mode);
4848
4849 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4850 {
4851 tree lo_index = TREE_OPERAND (index, 0);
4852 tree hi_index = TREE_OPERAND (index, 1);
4853 rtx index_r, pos_rtx, loop_end;
4854 struct nesting *loop;
4855 HOST_WIDE_INT lo, hi, count;
4856 tree position;
4857
4858 if (vector)
4859 abort ();
4860
4861 /* If the range is constant and "small", unroll the loop. */
4862 if (const_bounds_p
4863 && host_integerp (lo_index, 0)
4864 && host_integerp (hi_index, 0)
4865 && (lo = tree_low_cst (lo_index, 0),
4866 hi = tree_low_cst (hi_index, 0),
4867 count = hi - lo + 1,
4868 (GET_CODE (target) != MEM
4869 || count <= 2
4870 || (host_integerp (TYPE_SIZE (elttype), 1)
4871 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4872 <= 40 * 8)))))
4873 {
4874 lo -= minelt; hi -= minelt;
4875 for (; lo <= hi; lo++)
4876 {
4877 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4878
4879 if (GET_CODE (target) == MEM
4880 && !MEM_KEEP_ALIAS_SET_P (target)
4881 && TREE_CODE (type) == ARRAY_TYPE
4882 && TYPE_NONALIASED_COMPONENT (type))
4883 {
4884 target = copy_rtx (target);
4885 MEM_KEEP_ALIAS_SET_P (target) = 1;
4886 }
4887
4888 store_constructor_field
4889 (target, bitsize, bitpos, mode, value, type, cleared,
4890 get_alias_set (elttype));
4891 }
4892 }
4893 else
4894 {
4895 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4896 loop_end = gen_label_rtx ();
4897
4898 unsignedp = TREE_UNSIGNED (domain);
4899
4900 index = build_decl (VAR_DECL, NULL_TREE, domain);
4901
4902 index_r
4903 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4904 &unsignedp, 0));
4905 SET_DECL_RTL (index, index_r);
4906 if (TREE_CODE (value) == SAVE_EXPR
4907 && SAVE_EXPR_RTL (value) == 0)
4908 {
4909 /* Make sure value gets expanded once before the
4910 loop. */
4911 expand_expr (value, const0_rtx, VOIDmode, 0);
4912 emit_queue ();
4913 }
4914 store_expr (lo_index, index_r, 0);
4915 loop = expand_start_loop (0);
4916
4917 /* Assign value to element index. */
4918 position
4919 = convert (ssizetype,
4920 fold (build (MINUS_EXPR, TREE_TYPE (index),
4921 index, TYPE_MIN_VALUE (domain))));
4922 position = size_binop (MULT_EXPR, position,
4923 convert (ssizetype,
4924 TYPE_SIZE_UNIT (elttype)));
4925
4926 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4927 xtarget = offset_address (target, pos_rtx,
4928 highest_pow2_factor (position));
4929 xtarget = adjust_address (xtarget, mode, 0);
4930 if (TREE_CODE (value) == CONSTRUCTOR)
4931 store_constructor (value, xtarget, cleared,
4932 bitsize / BITS_PER_UNIT);
4933 else
4934 store_expr (value, xtarget, 0);
4935
4936 expand_exit_loop_if_false (loop,
4937 build (LT_EXPR, integer_type_node,
4938 index, hi_index));
4939
4940 expand_increment (build (PREINCREMENT_EXPR,
4941 TREE_TYPE (index),
4942 index, integer_one_node), 0, 0);
4943 expand_end_loop ();
4944 emit_label (loop_end);
4945 }
4946 }
4947 else if ((index != 0 && ! host_integerp (index, 0))
4948 || ! host_integerp (TYPE_SIZE (elttype), 1))
4949 {
4950 tree position;
4951
4952 if (vector)
4953 abort ();
4954
4955 if (index == 0)
4956 index = ssize_int (1);
4957
4958 if (minelt)
4959 index = convert (ssizetype,
4960 fold (build (MINUS_EXPR, TREE_TYPE (index),
4961 index, TYPE_MIN_VALUE (domain))));
4962
4963 position = size_binop (MULT_EXPR, index,
4964 convert (ssizetype,
4965 TYPE_SIZE_UNIT (elttype)));
4966 xtarget = offset_address (target,
4967 expand_expr (position, 0, VOIDmode, 0),
4968 highest_pow2_factor (position));
4969 xtarget = adjust_address (xtarget, mode, 0);
4970 store_expr (value, xtarget, 0);
4971 }
4972 else if (vector)
4973 {
4974 int pos;
4975
4976 if (index != 0)
4977 pos = tree_low_cst (index, 0) - minelt;
4978 else
4979 pos = i;
4980 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4981 }
4982 else
4983 {
4984 if (index != 0)
4985 bitpos = ((tree_low_cst (index, 0) - minelt)
4986 * tree_low_cst (TYPE_SIZE (elttype), 1));
4987 else
4988 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4989
4990 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4991 && TREE_CODE (type) == ARRAY_TYPE
4992 && TYPE_NONALIASED_COMPONENT (type))
4993 {
4994 target = copy_rtx (target);
4995 MEM_KEEP_ALIAS_SET_P (target) = 1;
4996 }
4997
4998 store_constructor_field (target, bitsize, bitpos, mode, value,
4999 type, cleared, get_alias_set (elttype));
5000 }
5001 }
5002 if (vector)
5003 {
5004 emit_insn (GEN_FCN (icode) (target,
5005 gen_rtx_PARALLEL (GET_MODE (target),
5006 gen_rtvec_v (n_elts, vector))));
5007 }
5008 }
5009
5010 /* Set constructor assignments. */
5011 else if (TREE_CODE (type) == SET_TYPE)
5012 {
5013 tree elt = CONSTRUCTOR_ELTS (exp);
5014 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5015 tree domain = TYPE_DOMAIN (type);
5016 tree domain_min, domain_max, bitlength;
5017
5018 /* The default implementation strategy is to extract the constant
5019 parts of the constructor, use that to initialize the target,
5020 and then "or" in whatever non-constant ranges we need in addition.
5021
5022 If a large set is all zero or all ones, it is
5023 probably better to set it using memset (if available) or bzero.
5024 Also, if a large set has just a single range, it may also be
5025 better to first clear the whole set (using bzero/memset)
5026 and then set the bits we want.  */
5027
5028 /* Check for all zeros. */
5029 if (elt == NULL_TREE && size > 0)
5030 {
5031 if (!cleared)
5032 clear_storage (target, GEN_INT (size));
5033 return;
5034 }
5035
5036 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5037 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5038 bitlength = size_binop (PLUS_EXPR,
5039 size_diffop (domain_max, domain_min),
5040 ssize_int (1));
5041
5042 nbits = tree_low_cst (bitlength, 1);
5043
5044 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5045 are "complicated" (more than one range), initialize (the
5046 constant parts) by copying from a constant. */
5047 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5048 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5049 {
5050 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5051 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5052 char *bit_buffer = alloca (nbits);
5053 HOST_WIDE_INT word = 0;
5054 unsigned int bit_pos = 0;
5055 unsigned int ibit = 0;
5056 unsigned int offset = 0; /* In bytes from beginning of set. */
5057
5058 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5059 for (;;)
5060 {
5061 if (bit_buffer[ibit])
5062 {
5063 if (BYTES_BIG_ENDIAN)
5064 word |= (1 << (set_word_size - 1 - bit_pos));
5065 else
5066 word |= 1 << bit_pos;
5067 }
5068
5069 bit_pos++; ibit++;
5070 if (bit_pos >= set_word_size || ibit == nbits)
5071 {
5072 if (word != 0 || ! cleared)
5073 {
5074 rtx datum = GEN_INT (word);
5075 rtx to_rtx;
5076
5077 /* The assumption here is that it is safe to use
5078 XEXP if the set is multi-word, but not if
5079 it's single-word. */
5080 if (GET_CODE (target) == MEM)
5081 to_rtx = adjust_address (target, mode, offset);
5082 else if (offset == 0)
5083 to_rtx = target;
5084 else
5085 abort ();
5086 emit_move_insn (to_rtx, datum);
5087 }
5088
5089 if (ibit == nbits)
5090 break;
5091 word = 0;
5092 bit_pos = 0;
5093 offset += set_word_size / BITS_PER_UNIT;
5094 }
5095 }
5096 }
5097 else if (!cleared)
5098 /* Don't bother clearing storage if the set is all ones. */
5099 if (TREE_CHAIN (elt) != NULL_TREE
5100 || (TREE_PURPOSE (elt) == NULL_TREE
5101 ? nbits != 1
5102 : ( ! host_integerp (TREE_VALUE (elt), 0)
5103 || ! host_integerp (TREE_PURPOSE (elt), 0)
5104 || (tree_low_cst (TREE_VALUE (elt), 0)
5105 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5106 != (HOST_WIDE_INT) nbits))))
5107 clear_storage (target, expr_size (exp));
5108
5109 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5110 {
5111 /* Start of range of element or NULL. */
5112 tree startbit = TREE_PURPOSE (elt);
5113 /* End of range of element, or element value. */
5114 tree endbit = TREE_VALUE (elt);
5115 HOST_WIDE_INT startb, endb;
5116 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5117
5118 bitlength_rtx = expand_expr (bitlength,
5119 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5120
5121 /* Handle non-range tuple element like [ expr ]. */
5122 if (startbit == NULL_TREE)
5123 {
5124 startbit = save_expr (endbit);
5125 endbit = startbit;
5126 }
5127
5128 startbit = convert (sizetype, startbit);
5129 endbit = convert (sizetype, endbit);
5130 if (! integer_zerop (domain_min))
5131 {
5132 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5133 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5134 }
5135 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5136 EXPAND_CONST_ADDRESS);
5137 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5138 EXPAND_CONST_ADDRESS);
5139
5140 if (REG_P (target))
5141 {
5142 targetx
5143 = assign_temp
5144 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5145 (GET_MODE (target), 0),
5146 TYPE_QUAL_CONST)),
5147 0, 1, 1);
5148 emit_move_insn (targetx, target);
5149 }
5150
5151 else if (GET_CODE (target) == MEM)
5152 targetx = target;
5153 else
5154 abort ();
5155
5156 /* Optimization: If startbit and endbit are constants divisible
5157 by BITS_PER_UNIT, call memset instead. */
5158 if (TARGET_MEM_FUNCTIONS
5159 && TREE_CODE (startbit) == INTEGER_CST
5160 && TREE_CODE (endbit) == INTEGER_CST
5161 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5162 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5163 {
5164 emit_library_call (memset_libfunc, LCT_NORMAL,
5165 VOIDmode, 3,
5166 plus_constant (XEXP (targetx, 0),
5167 startb / BITS_PER_UNIT),
5168 Pmode,
5169 constm1_rtx, TYPE_MODE (integer_type_node),
5170 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5171 TYPE_MODE (sizetype));
5172 }
5173 else
5174 emit_library_call (setbits_libfunc, LCT_NORMAL,
5175 VOIDmode, 4, XEXP (targetx, 0),
5176 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5177 startbit_rtx, TYPE_MODE (sizetype),
5178 endbit_rtx, TYPE_MODE (sizetype));
5179
5180 if (REG_P (target))
5181 emit_move_insn (target, targetx);
5182 }
5183 }
5184
5185 else
5186 abort ();
5187 }
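/* Editor's note: a standalone model of the SET_TYPE word-filling loop
   above: flag bytes from a flat buffer are packed into host words, with
   the bit order reversed on big-endian targets, and each full word is
   flushed at a byte offset into the set.  SET_WORD_SIZE and the
   BYTES_BIG_ENDIAN value are assumptions for this sketch, standing in
   for TYPE_ALIGN of the set type and the target's byte order.  */
#if 0
#include <stdio.h>

#define SET_WORD_SIZE 32
#define BYTES_BIG_ENDIAN 0   /* assumption for this sketch */

static void
pack_bits (const char *bit_buffer, unsigned nbits)
{
  unsigned long word = 0;
  unsigned bit_pos = 0, ibit = 0, offset = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= (BYTES_BIG_ENDIAN
		 ? 1UL << (SET_WORD_SIZE - 1 - bit_pos)
		 : 1UL << bit_pos);

      bit_pos++; ibit++;
      if (bit_pos >= SET_WORD_SIZE || ibit == nbits)
	{
	  /* Here GCC would emit_move_insn the word into the target;
	     this sketch just prints it.  */
	  printf ("word at byte offset %u: %#lx\n", offset, word);
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	  offset += SET_WORD_SIZE / 8;
	}
    }
}

int
main (void)
{
  char bits[40] = { 1, 0, 1, 1 };   /* bits 0, 2 and 3 set */
  pack_bits (bits, 40);
  return 0;
}
#endif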
5188
5189 /* Store the value of EXP (an expression tree)
5190 into a subfield of TARGET which has mode MODE and occupies
5191 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5192 If MODE is VOIDmode, it means that we are storing into a bit-field.
5193
5194 If VALUE_MODE is VOIDmode, return nothing in particular.
5195 UNSIGNEDP is not used in this case.
5196
5197 Otherwise, return an rtx for the value stored. This rtx
5198 has mode VALUE_MODE if that is convenient to do.
5199 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5200
5201 TYPE is the type of the underlying object,
5202
5203 ALIAS_SET is the alias set for the destination. This value will
5204 (in general) be different from that for TARGET, since TARGET is a
5205 reference to the containing structure. */
5206
5207 static rtx
5208 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5209 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5210 int unsignedp, tree type, int alias_set)
5211 {
5212 HOST_WIDE_INT width_mask = 0;
5213
5214 if (TREE_CODE (exp) == ERROR_MARK)
5215 return const0_rtx;
5216
5217 /* If we have nothing to store, do nothing unless the expression has
5218 side-effects. */
5219 if (bitsize == 0)
5220 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5221 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5222 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5223
5224 /* If we are storing into an unaligned field of an aligned union that is
5225 in a register, we may have the mode of TARGET being an integer mode but
5226 MODE == BLKmode. In that case, get an aligned object whose size and
5227 alignment are the same as TARGET and store TARGET into it (we can avoid
5228 the store if the field being stored is the entire width of TARGET). Then
5229 call ourselves recursively to store the field into a BLKmode version of
5230 that object. Finally, load from the object into TARGET. This is not
5231 very efficient in general, but should only be slightly more expensive
5232 than the otherwise-required unaligned accesses. Perhaps this can be
5233 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5234 twice, once with emit_move_insn and once via store_field. */
5235
5236 if (mode == BLKmode
5237 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5238 {
5239 rtx object = assign_temp (type, 0, 1, 1);
5240 rtx blk_object = adjust_address (object, BLKmode, 0);
5241
5242 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5243 emit_move_insn (object, target);
5244
5245 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5246 alias_set);
5247
5248 emit_move_insn (target, object);
5249
5250 /* We want to return the BLKmode version of the data. */
5251 return blk_object;
5252 }
5253
5254 if (GET_CODE (target) == CONCAT)
5255 {
5256 /* We're storing into a struct containing a single __complex. */
5257
5258 if (bitpos != 0)
5259 abort ();
5260 return store_expr (exp, target, 0);
5261 }
5262
5263 /* If the structure is in a register or if the component
5264 is a bit field, we cannot use addressing to access it.
5265 Use bit-field techniques or SUBREG to store in it. */
5266
5267 if (mode == VOIDmode
5268 || (mode != BLKmode && ! direct_store[(int) mode]
5269 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5270 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5271 || GET_CODE (target) == REG
5272 || GET_CODE (target) == SUBREG
5273 /* If the field isn't aligned enough to store as an ordinary memref,
5274 store it as a bit field. */
5275 || (mode != BLKmode
5276 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5277 || bitpos % GET_MODE_ALIGNMENT (mode))
5278 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5279 || (bitpos % BITS_PER_UNIT != 0)))
5280 /* If the RHS and field are a constant size and the size of the
5281 RHS isn't the same size as the bitfield, we must use bitfield
5282 operations. */
5283 || (bitsize >= 0
5284 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5285 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5286 {
5287 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5288
5289 /* If BITSIZE is narrower than the size of the type of EXP
5290 we will be narrowing TEMP. Normally, what's wanted are the
5291 low-order bits. However, if EXP's type is a record and this is a
5292 big-endian machine, we want the upper BITSIZE bits. */
5293 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5294 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5295 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5296 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5297 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5298 - bitsize),
5299 NULL_RTX, 1);
5300
5301 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5302 MODE. */
5303 if (mode != VOIDmode && mode != BLKmode
5304 && mode != TYPE_MODE (TREE_TYPE (exp)))
5305 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5306
5307 /* If the modes of TARGET and TEMP are both BLKmode, both
5308 must be in memory and BITPOS must be aligned on a byte
5309 boundary. If so, we simply do a block copy. */
5310 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5311 {
5312 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5313 || bitpos % BITS_PER_UNIT != 0)
5314 abort ();
5315
5316 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5317 emit_block_move (target, temp,
5318 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5319 / BITS_PER_UNIT),
5320 BLOCK_OP_NORMAL);
5321
5322 return value_mode == VOIDmode ? const0_rtx : target;
5323 }
5324
5325 /* Store the value in the bitfield. */
5326 store_bit_field (target, bitsize, bitpos, mode, temp,
5327 int_size_in_bytes (type));
5328
5329 if (value_mode != VOIDmode)
5330 {
5331 /* The caller wants an rtx for the value.
5332 If possible, avoid refetching from the bitfield itself. */
5333 if (width_mask != 0
5334 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5335 {
5336 tree count;
5337 enum machine_mode tmode;
5338
5339 tmode = GET_MODE (temp);
5340 if (tmode == VOIDmode)
5341 tmode = value_mode;
5342
5343 if (unsignedp)
5344 return expand_and (tmode, temp,
5345 gen_int_mode (width_mask, tmode),
5346 NULL_RTX);
5347
5348 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5349 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5350 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5351 }
5352
5353 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5354 NULL_RTX, value_mode, VOIDmode,
5355 int_size_in_bytes (type));
5356 }
5357 return const0_rtx;
5358 }
5359 else
5360 {
5361 rtx addr = XEXP (target, 0);
5362 rtx to_rtx = target;
5363
5364 /* If a value is wanted, it must be the lhs;
5365 so make the address stable for multiple use. */
5366
5367 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5368 && ! CONSTANT_ADDRESS_P (addr)
5369 /* A frame-pointer reference is already stable. */
5370 && ! (GET_CODE (addr) == PLUS
5371 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5372 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5373 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5374 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5375
5376 /* Now build a reference to just the desired component. */
5377
5378 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5379
5380 if (to_rtx == target)
5381 to_rtx = copy_rtx (to_rtx);
5382
5383 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5384 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5385 set_mem_alias_set (to_rtx, alias_set);
5386
5387 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5388 }
5389 }
5390 \f
5391 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5392 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5393 codes and find the ultimate containing object, which we return.
5394
5395 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5396 bit position, and *PUNSIGNEDP to the signedness of the field.
5397 If the position of the field is variable, we store a tree
5398 giving the variable offset (in units) in *POFFSET.
5399 This offset is in addition to the bit position.
5400 If the position is not variable, we store 0 in *POFFSET.
5401
5402 If any of the extraction expressions is volatile,
5403 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5404
5405 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5406 is a mode that can be used to access the field. In that case, *PBITSIZE
5407 is redundant.
5408
5409 If the field describes a variable-sized object, *PMODE is set to
5410 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5411 this case, but the address of the object can be found. */
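/* Sketch of how this runs (reference hypothetical): for r.f[i].g, the
   loop below peels off the COMPONENT_REF for G, the ARRAY_REF for [i],
   and the COMPONENT_REF for F, accumulating the constant field offsets
   in BIT_OFFSET and the variable part (I times the element size) in
   OFFSET; the tree returned is the reference to R itself. */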
5412
5413 tree
5414 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5415 HOST_WIDE_INT *pbitpos, tree *poffset,
5416 enum machine_mode *pmode, int *punsignedp,
5417 int *pvolatilep)
5418 {
5419 tree size_tree = 0;
5420 enum machine_mode mode = VOIDmode;
5421 tree offset = size_zero_node;
5422 tree bit_offset = bitsize_zero_node;
5423 tree placeholder_ptr = 0;
5424 tree tem;
5425
5426 /* First get the mode, signedness, and size. We do this from just the
5427 outermost expression. */
5428 if (TREE_CODE (exp) == COMPONENT_REF)
5429 {
5430 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5431 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5432 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5433
5434 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5435 }
5436 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5437 {
5438 size_tree = TREE_OPERAND (exp, 1);
5439 *punsignedp = TREE_UNSIGNED (exp);
5440 }
5441 else
5442 {
5443 mode = TYPE_MODE (TREE_TYPE (exp));
5444 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5445
5446 if (mode == BLKmode)
5447 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5448 else
5449 *pbitsize = GET_MODE_BITSIZE (mode);
5450 }
5451
5452 if (size_tree != 0)
5453 {
5454 if (! host_integerp (size_tree, 1))
5455 mode = BLKmode, *pbitsize = -1;
5456 else
5457 *pbitsize = tree_low_cst (size_tree, 1);
5458 }
5459
5460 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5461 and find the ultimate containing object. */
5462 while (1)
5463 {
5464 if (TREE_CODE (exp) == BIT_FIELD_REF)
5465 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5466 else if (TREE_CODE (exp) == COMPONENT_REF)
5467 {
5468 tree field = TREE_OPERAND (exp, 1);
5469 tree this_offset = DECL_FIELD_OFFSET (field);
5470
5471 /* If this field hasn't been filled in yet, don't go
5472 past it. This should only happen when folding expressions
5473 made during type construction. */
5474 if (this_offset == 0)
5475 break;
5476 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5477 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5478
5479 offset = size_binop (PLUS_EXPR, offset, this_offset);
5480 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5481 DECL_FIELD_BIT_OFFSET (field));
5482
5483 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5484 }
5485
5486 else if (TREE_CODE (exp) == ARRAY_REF
5487 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5488 {
5489 tree index = TREE_OPERAND (exp, 1);
5490 tree array = TREE_OPERAND (exp, 0);
5491 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5492 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5493 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5494
5495 /* We assume all arrays have sizes that are a multiple of a byte.
5496 First subtract the lower bound, if any, in the type of the
5497 index, then convert to sizetype and multiply by the size of the
5498 array element. */
5499 if (low_bound != 0 && ! integer_zerop (low_bound))
5500 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5501 index, low_bound));
5502
5503 /* If the index has a self-referential type, pass it to a
5504 WITH_RECORD_EXPR; if the component size is self-referential,
5505 pass our component to one. */
5506 if (CONTAINS_PLACEHOLDER_P (index))
5507 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5508 if (CONTAINS_PLACEHOLDER_P (unit_size))
5509 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5510
5511 offset = size_binop (PLUS_EXPR, offset,
5512 size_binop (MULT_EXPR,
5513 convert (sizetype, index),
5514 unit_size));
5515 }
5516
5517 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5518 {
5519 tree new = find_placeholder (exp, &placeholder_ptr);
5520
5521 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5522 We might have been called from tree optimization where we
5523 haven't set up an object yet. */
5524 if (new == 0)
5525 break;
5526 else
5527 exp = new;
5528
5529 continue;
5530 }
5531
5532 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5533 conversions that don't change the mode, and all view conversions
5534 except those that need to "step up" the alignment. */
5535 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5536 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5537 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5538 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5539 && STRICT_ALIGNMENT
5540 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5541 < BIGGEST_ALIGNMENT)
5542 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5543 || TYPE_ALIGN_OK (TREE_TYPE
5544 (TREE_OPERAND (exp, 0))))))
5545 && ! ((TREE_CODE (exp) == NOP_EXPR
5546 || TREE_CODE (exp) == CONVERT_EXPR)
5547 && (TYPE_MODE (TREE_TYPE (exp))
5548 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5549 break;
5550
5551 /* If any reference in the chain is volatile, the effect is volatile. */
5552 if (TREE_THIS_VOLATILE (exp))
5553 *pvolatilep = 1;
5554
5555 exp = TREE_OPERAND (exp, 0);
5556 }
5557
5558 /* If OFFSET is constant, see if we can return the whole thing as a
5559 constant bit position. Otherwise, split it up. */
5560 if (host_integerp (offset, 0)
5561 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5562 bitsize_unit_node))
5563 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5564 && host_integerp (tem, 0))
5565 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5566 else
5567 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5568
5569 *pmode = mode;
5570 return exp;
5571 }
5572
5573 /* Return 1 if T is an expression that get_inner_reference handles. */
5574
5575 int
5576 handled_component_p (tree t)
5577 {
5578 switch (TREE_CODE (t))
5579 {
5580 case BIT_FIELD_REF:
5581 case COMPONENT_REF:
5582 case ARRAY_REF:
5583 case ARRAY_RANGE_REF:
5584 case NON_LVALUE_EXPR:
5585 case VIEW_CONVERT_EXPR:
5586 return 1;
5587
5588 /* ??? Sure they are handled, but get_inner_reference may return
5589 a different PBITSIZE, depending upon whether the expression is
5590 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5591 case NOP_EXPR:
5592 case CONVERT_EXPR:
5593 return (TYPE_MODE (TREE_TYPE (t))
5594 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5595
5596 default:
5597 return 0;
5598 }
5599 }
5600 \f
5601 /* Given an rtx VALUE that may contain additions and multiplications, return
5602 an equivalent value that just refers to a register, memory, or constant.
5603 This is done by generating instructions to perform the arithmetic and
5604 returning a pseudo-register containing the value.
5605
5606 The returned value may be a REG, SUBREG, MEM or constant. */
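/* Illustrative sketch (register numbers hypothetical): given
   (plus (mult (reg 60) (const_int 4)) (const_int 8)), force_operand
   expands the multiplication via expand_mult (typically a shift into a
   fresh pseudo) and then emits the addition, so the caller receives a
   single pseudo-register it can feed to any general-operand pattern. */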
5607
5608 rtx
5609 force_operand (rtx value, rtx target)
5610 {
5611 rtx op1, op2;
5612 /* Use subtarget as the target for operand 0 of a binary operation. */
5613 rtx subtarget = get_subtarget (target);
5614 enum rtx_code code = GET_CODE (value);
5615
5616 /* Check for a PIC address load. */
5617 if ((code == PLUS || code == MINUS)
5618 && XEXP (value, 0) == pic_offset_table_rtx
5619 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5620 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5621 || GET_CODE (XEXP (value, 1)) == CONST))
5622 {
5623 if (!subtarget)
5624 subtarget = gen_reg_rtx (GET_MODE (value));
5625 emit_move_insn (subtarget, value);
5626 return subtarget;
5627 }
5628
5629 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5630 {
5631 if (!target)
5632 target = gen_reg_rtx (GET_MODE (value));
5633 convert_move (target, force_operand (XEXP (value, 0), NULL),
5634 code == ZERO_EXTEND);
5635 return target;
5636 }
5637
5638 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5639 {
5640 op2 = XEXP (value, 1);
5641 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5642 subtarget = 0;
5643 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5644 {
5645 code = PLUS;
5646 op2 = negate_rtx (GET_MODE (value), op2);
5647 }
5648
5649 /* Check for an addition with OP2 a constant integer and our first
5650 operand a PLUS of a virtual register and something else. In that
5651 case, we want to emit the sum of the virtual register and the
5652 constant first and then add the other value. This allows virtual
5653 register instantiation to simply modify the constant rather than
5654 creating another one around this addition. */
5655 if (code == PLUS && GET_CODE (op2) == CONST_INT
5656 && GET_CODE (XEXP (value, 0)) == PLUS
5657 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5658 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5659 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5660 {
5661 rtx temp = expand_simple_binop (GET_MODE (value), code,
5662 XEXP (XEXP (value, 0), 0), op2,
5663 subtarget, 0, OPTAB_LIB_WIDEN);
5664 return expand_simple_binop (GET_MODE (value), code, temp,
5665 force_operand (XEXP (XEXP (value,
5666 0), 1), 0),
5667 target, 0, OPTAB_LIB_WIDEN);
5668 }
5669
5670 op1 = force_operand (XEXP (value, 0), subtarget);
5671 op2 = force_operand (op2, NULL_RTX);
5672 switch (code)
5673 {
5674 case MULT:
5675 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5676 case DIV:
5677 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5678 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5679 target, 1, OPTAB_LIB_WIDEN);
5680 else
5681 return expand_divmod (0,
5682 FLOAT_MODE_P (GET_MODE (value))
5683 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5684 GET_MODE (value), op1, op2, target, 0);
5685 break;
5686 case MOD:
5687 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5688 target, 0);
5689 break;
5690 case UDIV:
5691 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5692 target, 1);
5693 break;
5694 case UMOD:
5695 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5696 target, 1);
5697 break;
5698 case ASHIFTRT:
5699 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5700 target, 0, OPTAB_LIB_WIDEN);
5701 break;
5702 default:
5703 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5704 target, 1, OPTAB_LIB_WIDEN);
5705 }
5706 }
5707 if (GET_RTX_CLASS (code) == '1')
5708 {
5709 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5710 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5711 }
5712
5713 #ifdef INSN_SCHEDULING
5714 /* On machines that have insn scheduling, we want all memory references to
5715 be explicit, so we need to deal with paradoxical SUBREGs of MEMs here. */
5716 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5717 && (GET_MODE_SIZE (GET_MODE (value))
5718 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5719 value
5720 = simplify_gen_subreg (GET_MODE (value),
5721 force_reg (GET_MODE (SUBREG_REG (value)),
5722 force_operand (SUBREG_REG (value),
5723 NULL_RTX)),
5724 GET_MODE (SUBREG_REG (value)),
5725 SUBREG_BYTE (value));
5726 #endif
5727
5728 return value;
5729 }
5730 \f
5731 /* Subroutine of expand_expr: return nonzero iff there is no way that
5732 EXP can reference X, which is being modified. TOP_P is nonzero if this
5733 call is going to be used to determine whether we need a temporary
5734 for EXP, as opposed to a recursive call to this function.
5735
5736 It is always safe for this routine to return zero since it merely
5737 searches for optimization opportunities. */
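/* For example (trees hypothetical): when expanding X = A + F (), the
   caller may ask whether X's rtx can serve as a target while F () is
   evaluated; if X lives in memory, the CALL_EXPR case below answers 0,
   since a call is assumed to clobber all of memory. */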
5738
5739 int
5740 safe_from_p (rtx x, tree exp, int top_p)
5741 {
5742 rtx exp_rtl = 0;
5743 int i, nops;
5744 static tree save_expr_list;
5745
5746 if (x == 0
5747 /* If EXP has varying size, we MUST use a target since we currently
5748 have no way of allocating temporaries of variable size
5749 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5750 So we assume here that something at a higher level has prevented a
5751 clash. This is somewhat bogus, but the best we can do. Only
5752 do this when X is BLKmode and when we are at the top level. */
5753 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5754 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5755 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5756 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5757 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5758 != INTEGER_CST)
5759 && GET_MODE (x) == BLKmode)
5760 /* If X is in the outgoing argument area, it is always safe. */
5761 || (GET_CODE (x) == MEM
5762 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5763 || (GET_CODE (XEXP (x, 0)) == PLUS
5764 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5765 return 1;
5766
5767 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5768 find the underlying pseudo. */
5769 if (GET_CODE (x) == SUBREG)
5770 {
5771 x = SUBREG_REG (x);
5772 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5773 return 0;
5774 }
5775
5776 /* A SAVE_EXPR might appear many times in the expression passed to the
5777 top-level safe_from_p call, and if it has a complex subexpression,
5778 examining it multiple times could result in a combinatorial explosion.
5779 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5780 with optimization took about 28 minutes to compile -- even though it was
5781 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5782 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5783 we have processed. Note that the only test of top_p was above. */
5784
5785 if (top_p)
5786 {
5787 int rtn;
5788 tree t;
5789
5790 save_expr_list = 0;
5791
5792 rtn = safe_from_p (x, exp, 0);
5793
5794 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5795 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5796
5797 return rtn;
5798 }
5799
5800 /* Now look at our tree code and possibly recurse. */
5801 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5802 {
5803 case 'd':
5804 exp_rtl = DECL_RTL_IF_SET (exp);
5805 break;
5806
5807 case 'c':
5808 return 1;
5809
5810 case 'x':
5811 if (TREE_CODE (exp) == TREE_LIST)
5812 {
5813 while (1)
5814 {
5815 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5816 return 0;
5817 exp = TREE_CHAIN (exp);
5818 if (!exp)
5819 return 1;
5820 if (TREE_CODE (exp) != TREE_LIST)
5821 return safe_from_p (x, exp, 0);
5822 }
5823 }
5824 else if (TREE_CODE (exp) == ERROR_MARK)
5825 return 1; /* An already-visited SAVE_EXPR? */
5826 else
5827 return 0;
5828
5829 case '2':
5830 case '<':
5831 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5832 return 0;
5833 /* FALLTHRU */
5834
5835 case '1':
5836 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5837
5838 case 'e':
5839 case 'r':
5840 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5841 the expression. If it is set, we conflict iff we are that rtx or
5842 both are in memory. Otherwise, we check all operands of the
5843 expression recursively. */
5844
5845 switch (TREE_CODE (exp))
5846 {
5847 case ADDR_EXPR:
5848 /* If the operand is static or we are static, we can't conflict.
5849 Likewise if we don't conflict with the operand at all. */
5850 if (staticp (TREE_OPERAND (exp, 0))
5851 || TREE_STATIC (exp)
5852 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5853 return 1;
5854
5855 /* Otherwise, the only way this can conflict is if we are taking
5856 the address of a DECL whose address is part of X, which is
5857 very rare. */
5858 exp = TREE_OPERAND (exp, 0);
5859 if (DECL_P (exp))
5860 {
5861 if (!DECL_RTL_SET_P (exp)
5862 || GET_CODE (DECL_RTL (exp)) != MEM)
5863 return 0;
5864 else
5865 exp_rtl = XEXP (DECL_RTL (exp), 0);
5866 }
5867 break;
5868
5869 case INDIRECT_REF:
5870 if (GET_CODE (x) == MEM
5871 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5872 get_alias_set (exp)))
5873 return 0;
5874 break;
5875
5876 case CALL_EXPR:
5877 /* Assume that the call will clobber all hard registers and
5878 all of memory. */
5879 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5880 || GET_CODE (x) == MEM)
5881 return 0;
5882 break;
5883
5884 case RTL_EXPR:
5885 /* If a sequence exists, we would have to scan every instruction
5886 in the sequence to see if it was safe. This is probably not
5887 worthwhile. */
5888 if (RTL_EXPR_SEQUENCE (exp))
5889 return 0;
5890
5891 exp_rtl = RTL_EXPR_RTL (exp);
5892 break;
5893
5894 case WITH_CLEANUP_EXPR:
5895 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5896 break;
5897
5898 case CLEANUP_POINT_EXPR:
5899 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5900
5901 case SAVE_EXPR:
5902 exp_rtl = SAVE_EXPR_RTL (exp);
5903 if (exp_rtl)
5904 break;
5905
5906 /* If we've already scanned this, don't do it again. Otherwise,
5907 mark it as scanned and record it so that the flag can be cleared
5908 when the top-level call finishes. */
5909 if (TREE_PRIVATE (exp))
5910 return 1;
5911
5912 TREE_PRIVATE (exp) = 1;
5913 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5914 {
5915 TREE_PRIVATE (exp) = 0;
5916 return 0;
5917 }
5918
5919 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5920 return 1;
5921
5922 case BIND_EXPR:
5923 /* The only operand we look at is operand 1. The rest aren't
5924 part of the expression. */
5925 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5926
5927 default:
5928 break;
5929 }
5930
5931 /* If we have an rtx, we do not need to scan our operands. */
5932 if (exp_rtl)
5933 break;
5934
5935 nops = first_rtl_op (TREE_CODE (exp));
5936 for (i = 0; i < nops; i++)
5937 if (TREE_OPERAND (exp, i) != 0
5938 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5939 return 0;
5940
5941 /* If this is a language-specific tree code, it may require
5942 special handling. */
5943 if ((unsigned int) TREE_CODE (exp)
5944 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5945 && !(*lang_hooks.safe_from_p) (x, exp))
5946 return 0;
5947 }
5948
5949 /* If we have an rtl, find any enclosed object. Then see if we conflict
5950 with it. */
5951 if (exp_rtl)
5952 {
5953 if (GET_CODE (exp_rtl) == SUBREG)
5954 {
5955 exp_rtl = SUBREG_REG (exp_rtl);
5956 if (GET_CODE (exp_rtl) == REG
5957 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5958 return 0;
5959 }
5960
5961 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5962 are memory and they conflict. */
5963 return ! (rtx_equal_p (x, exp_rtl)
5964 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5965 && true_dependence (exp_rtl, VOIDmode, x,
5966 rtx_addr_varies_p)));
5967 }
5968
5969 /* If we reach here, it is safe. */
5970 return 1;
5971 }
5972
5973 /* Subroutine of expand_expr: return rtx if EXP is a
5974 variable or parameter; else return 0. */
5975
5976 static rtx
5977 var_rtx (tree exp)
5978 {
5979 STRIP_NOPS (exp);
5980 switch (TREE_CODE (exp))
5981 {
5982 case PARM_DECL:
5983 case VAR_DECL:
5984 return DECL_RTL (exp);
5985 default:
5986 return 0;
5987 }
5988 }
5989 \f
5990 /* Return the highest power of two that EXP is known to be a multiple of.
5991 This is used in updating alignment of MEMs in array references. */
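/* Hedged example: for the tree (i * 12) + 8, the MULT_EXPR case yields
   4 (the factor of 12) times 1 (all we know about I), and the
   PLUS_EXPR case takes MIN (4, 8), so the result is 4 -- a MEM indexed
   by this expression may be marked as 4-byte aligned. */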
5992
5993 static unsigned HOST_WIDE_INT
5994 highest_pow2_factor (tree exp)
5995 {
5996 unsigned HOST_WIDE_INT c0, c1;
5997
5998 switch (TREE_CODE (exp))
5999 {
6000 case INTEGER_CST:
6001 /* We can find the lowest bit that's a one. If the low
6002 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6003 We need to handle this case since we can find it in a COND_EXPR,
6004 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6005 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6006 later ICE. */
6007 if (TREE_CONSTANT_OVERFLOW (exp))
6008 return BIGGEST_ALIGNMENT;
6009 else
6010 {
6011 /* Note: tree_low_cst is intentionally not used here;
6012 we don't care about the upper bits. */
6013 c0 = TREE_INT_CST_LOW (exp);
6014 c0 &= -c0;
6015 return c0 ? c0 : BIGGEST_ALIGNMENT;
6016 }
6017 break;
6018
6019 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 return MIN (c0, c1);
6023
6024 case MULT_EXPR:
6025 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6026 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6027 return c0 * c1;
6028
6029 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6030 case CEIL_DIV_EXPR:
6031 if (integer_pow2p (TREE_OPERAND (exp, 1))
6032 && host_integerp (TREE_OPERAND (exp, 1), 1))
6033 {
6034 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6035 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6036 return MAX (1, c0 / c1);
6037 }
6038 break;
6039
6040 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6041 case SAVE_EXPR: case WITH_RECORD_EXPR:
6042 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6043
6044 case COMPOUND_EXPR:
6045 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6046
6047 case COND_EXPR:
6048 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6049 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6050 return MIN (c0, c1);
6051
6052 default:
6053 break;
6054 }
6055
6056 return 1;
6057 }
6058
6059 /* Similar, except that it is known that the expression must be a multiple
6060 of the alignment of TYPE. */
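/* E.g. (values hypothetical) if EXP alone only factors as 2 but TYPE
   is known to be 8-byte aligned, the result is 8, since EXP must also
   be a multiple of the type's alignment. */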
6061
6062 static unsigned HOST_WIDE_INT
6063 highest_pow2_factor_for_type (tree type, tree exp)
6064 {
6065 unsigned HOST_WIDE_INT type_align, factor;
6066
6067 factor = highest_pow2_factor (exp);
6068 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6069 return MAX (factor, type_align);
6070 }
6071 \f
6072 /* Return an object on the placeholder list that matches EXP, a
6073 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6074 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6075 tree.def. If no such object is found, return 0. If PLIST is nonzero,
6076 *PLIST gives the starting point for the search in the placeholder list
6077 (zero meaning the start of the list), and on success *PLIST is set to
6078 the list entry at which the object was found. */
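/* Sketch: when a self-referential size such as a PLACEHOLDER_EXPR is
   evaluated inside a WITH_RECORD_EXPR for some object OBJ, the loops
   below walk placeholder_list looking first for a sub-object of OBJ
   with the wanted type, then for one whose type points to it, the
   latter being returned through an INDIRECT_REF. */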
6079
6080 tree
6081 find_placeholder (tree exp, tree *plist)
6082 {
6083 tree type = TREE_TYPE (exp);
6084 tree placeholder_expr;
6085
6086 for (placeholder_expr
6087 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6088 placeholder_expr != 0;
6089 placeholder_expr = TREE_CHAIN (placeholder_expr))
6090 {
6091 tree need_type = TYPE_MAIN_VARIANT (type);
6092 tree elt;
6093
6094 /* Find the outermost reference that is of the type we want. If none,
6095 see if any object has a type that is a pointer to the type we
6096 want. */
6097 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6098 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6099 || TREE_CODE (elt) == COND_EXPR)
6100 ? TREE_OPERAND (elt, 1)
6101 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6104 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6105 ? TREE_OPERAND (elt, 0) : 0))
6106 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6107 {
6108 if (plist)
6109 *plist = placeholder_expr;
6110 return elt;
6111 }
6112
6113 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6114 elt
6115 = ((TREE_CODE (elt) == COMPOUND_EXPR
6116 || TREE_CODE (elt) == COND_EXPR)
6117 ? TREE_OPERAND (elt, 1)
6118 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6121 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6122 ? TREE_OPERAND (elt, 0) : 0))
6123 if (POINTER_TYPE_P (TREE_TYPE (elt))
6124 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6125 == need_type))
6126 {
6127 if (plist)
6128 *plist = placeholder_expr;
6129 return build1 (INDIRECT_REF, need_type, elt);
6130 }
6131 }
6132
6133 return 0;
6134 }
6135
6136 /* Subroutine of expand_expr. Expand the two operands of a binary
6137 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6138 The value may be stored in TARGET if TARGET is nonzero. The
6139 MODIFIER argument is as documented by expand_expr. */
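/* E.g. (trees illustrative): expanding A + A reuses one expansion for
   both operands via copy_rtx, while for A + F () under
   flag_evaluation_order, A is first wrapped in a SAVE_EXPR so the call
   cannot clobber it. */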
6140
6141 static void
6142 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6143 enum expand_modifier modifier)
6144 {
6145 if (! safe_from_p (target, exp1, 1))
6146 target = 0;
6147 if (operand_equal_p (exp0, exp1, 0))
6148 {
6149 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6150 *op1 = copy_rtx (*op0);
6151 }
6152 else
6153 {
6154 /* If we need to preserve evaluation order, copy exp0 into its own
6155 temporary variable so that it can't be clobbered by exp1. */
6156 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6157 exp0 = save_expr (exp0);
6158 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6159 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6160 }
6161 }
6162
6163 \f
6164 /* expand_expr: generate code for computing expression EXP.
6165 An rtx for the computed value is returned. The value is never null.
6166 In the case of a void EXP, const0_rtx is returned.
6167
6168 The value may be stored in TARGET if TARGET is nonzero.
6169 TARGET is just a suggestion; callers must assume that
6170 the rtx returned may not be the same as TARGET.
6171
6172 If TARGET is CONST0_RTX, it means that the value will be ignored.
6173
6174 If TMODE is not VOIDmode, it suggests generating the
6175 result in mode TMODE. But this is done only when convenient.
6176 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6177 TMODE is just a suggestion; callers must assume that
6178 the rtx returned may not have mode TMODE.
6179
6180 Note that TARGET may have neither TMODE nor MODE. In that case, it
6181 probably will not be used.
6182
6183 If MODIFIER is EXPAND_SUM then when EXP is an addition
6184 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6185 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6186 products as above, or REG or MEM, or constant.
6187 Ordinarily in such cases we would output mul or add instructions
6188 and then return a pseudo reg containing the sum.
6189
6190 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6191 it also marks a label as absolutely required (it can't be dead).
6192 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6193 This is used for outputting expressions used in initializers.
6194
6195 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6196 with a constant address even if that address is not normally legitimate.
6197 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6198
6199 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6200 a call parameter. Such targets require special care as we haven't yet
6201 marked TARGET so that it's safe from being trashed by libcalls. We
6202 don't want to use TARGET for anything but the final result;
6203 intermediate values must go elsewhere. Additionally, calls to
6204 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
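/* Illustrative (rtl hypothetical): with MODIFIER == EXPAND_SUM,
   expanding P + I * 4 may come back as
   (plus (reg P) (mult (reg I) (const_int 4)))
   without any arithmetic being emitted, letting the caller fold the
   whole sum into a single addressing mode. */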
6205
6206 rtx
6207 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6208 enum expand_modifier modifier)
6209 {
6210 rtx op0, op1, temp;
6211 tree type = TREE_TYPE (exp);
6212 int unsignedp = TREE_UNSIGNED (type);
6213 enum machine_mode mode;
6214 enum tree_code code = TREE_CODE (exp);
6215 optab this_optab;
6216 rtx subtarget, original_target;
6217 int ignore;
6218 tree context;
6219
6220 /* Handle ERROR_MARK before anybody tries to access its type. */
6221 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6222 {
6223 op0 = CONST0_RTX (tmode);
6224 if (op0 != 0)
6225 return op0;
6226 return const0_rtx;
6227 }
6228
6229 mode = TYPE_MODE (type);
6230 /* Use subtarget as the target for operand 0 of a binary operation. */
6231 subtarget = get_subtarget (target);
6232 original_target = target;
6233 ignore = (target == const0_rtx
6234 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6235 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6236 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6237 && TREE_CODE (type) == VOID_TYPE));
6238
6239 /* If we are going to ignore this result, we need only do something
6240 if there is a side-effect somewhere in the expression. If there
6241 is, short-circuit the most common cases here. Note that we must
6242 not call expand_expr with anything but const0_rtx in case this
6243 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6244
6245 if (ignore)
6246 {
6247 if (! TREE_SIDE_EFFECTS (exp))
6248 return const0_rtx;
6249
6250 /* Ensure we reference a volatile object even if the value is ignored, but
6251 don't do this if all we are doing is taking its address. */
6252 if (TREE_THIS_VOLATILE (exp)
6253 && TREE_CODE (exp) != FUNCTION_DECL
6254 && mode != VOIDmode && mode != BLKmode
6255 && modifier != EXPAND_CONST_ADDRESS)
6256 {
6257 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6258 if (GET_CODE (temp) == MEM)
6259 temp = copy_to_reg (temp);
6260 return const0_rtx;
6261 }
6262
6263 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6264 || code == INDIRECT_REF || code == BUFFER_REF)
6265 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6266 modifier);
6267
6268 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6269 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6270 {
6271 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6272 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6273 return const0_rtx;
6274 }
6275 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6276 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6277 /* If the second operand has no side effects, just evaluate
6278 the first. */
6279 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6280 modifier);
6281 else if (code == BIT_FIELD_REF)
6282 {
6283 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6284 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6285 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6286 return const0_rtx;
6287 }
6288
6289 target = 0;
6290 }
6291
6292 /* If we will do cse, generate all results into pseudo registers
6293 since 1) that allows cse to find more things
6294 and 2) otherwise cse could produce an insn the machine
6295 cannot support. An exception is a CONSTRUCTOR into a multi-word
6296 MEM: that's much more likely to be most efficient into the MEM.
6297 Another is a CALL_EXPR which must return in memory. */
6298
6299 if (! cse_not_expected && mode != BLKmode && target
6300 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6301 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6302 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6303 target = 0;
6304
6305 switch (code)
6306 {
6307 case LABEL_DECL:
6308 {
6309 tree function = decl_function_context (exp);
6310 /* Labels in containing functions, or labels used from initializers,
6311 must be forced. */
6312 if (modifier == EXPAND_INITIALIZER
6313 || (function != current_function_decl
6314 && function != inline_function_decl
6315 && function != 0))
6316 temp = force_label_rtx (exp);
6317 else
6318 temp = label_rtx (exp);
6319
6320 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6321 if (function != current_function_decl
6322 && function != inline_function_decl && function != 0)
6323 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6324 return temp;
6325 }
6326
6327 case PARM_DECL:
6328 if (!DECL_RTL_SET_P (exp))
6329 {
6330 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6331 return CONST0_RTX (mode);
6332 }
6333
6334 /* ... fall through ... */
6335
6336 case VAR_DECL:
6337 /* If a static var's type was incomplete when the decl was written,
6338 but the type is complete now, lay out the decl now. */
6339 if (DECL_SIZE (exp) == 0
6340 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6341 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6342 layout_decl (exp, 0);
6343
6344 /* ... fall through ... */
6345
6346 case FUNCTION_DECL:
6347 case RESULT_DECL:
6348 if (DECL_RTL (exp) == 0)
6349 abort ();
6350
6351 /* Ensure the variable is marked as used even if it doesn't go through
6352 a parser. If it hasn't been used yet, write out an external
6353 definition. */
6354 if (! TREE_USED (exp))
6355 {
6356 assemble_external (exp);
6357 TREE_USED (exp) = 1;
6358 }
6359
6360 /* Show we haven't gotten RTL for this yet. */
6361 temp = 0;
6362
6363 /* Handle variables inherited from containing functions. */
6364 context = decl_function_context (exp);
6365
6366 /* We treat inline_function_decl as an alias for the current function
6367 because that is the inline function whose vars, types, etc.
6368 are being merged into the current function.
6369 See expand_inline_function. */
6370
6371 if (context != 0 && context != current_function_decl
6372 && context != inline_function_decl
6373 /* If var is static, we don't need a static chain to access it. */
6374 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6375 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6376 {
6377 rtx addr;
6378
6379 /* Mark as non-local and addressable. */
6380 DECL_NONLOCAL (exp) = 1;
6381 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6382 abort ();
6383 (*lang_hooks.mark_addressable) (exp);
6384 if (GET_CODE (DECL_RTL (exp)) != MEM)
6385 abort ();
6386 addr = XEXP (DECL_RTL (exp), 0);
6387 if (GET_CODE (addr) == MEM)
6388 addr
6389 = replace_equiv_address (addr,
6390 fix_lexical_addr (XEXP (addr, 0), exp));
6391 else
6392 addr = fix_lexical_addr (addr, exp);
6393
6394 temp = replace_equiv_address (DECL_RTL (exp), addr);
6395 }
6396
6397 /* This is the case of an array whose size is to be determined
6398 from its initializer, while the initializer is still being parsed.
6399 See expand_decl. */
6400
6401 else if (GET_CODE (DECL_RTL (exp)) == MEM
6402 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6403 temp = validize_mem (DECL_RTL (exp));
6404
6405 /* If DECL_RTL is memory, we are in the normal case and either
6406 the address is not valid or it is not a register and -fforce-addr
6407 is specified, get the address into a register. */
6408
6409 else if (GET_CODE (DECL_RTL (exp)) == MEM
6410 && modifier != EXPAND_CONST_ADDRESS
6411 && modifier != EXPAND_SUM
6412 && modifier != EXPAND_INITIALIZER
6413 && (! memory_address_p (DECL_MODE (exp),
6414 XEXP (DECL_RTL (exp), 0))
6415 || (flag_force_addr
6416 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6417 temp = replace_equiv_address (DECL_RTL (exp),
6418 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6419
6420 /* If we got something, return it. But first, set the alignment
6421 if the address is a register. */
6422 if (temp != 0)
6423 {
6424 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6425 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6426
6427 return temp;
6428 }
6429
6430 /* If the mode of DECL_RTL does not match that of the decl, it
6431 must be a promoted value. We return a SUBREG of the wanted mode,
6432 but mark it so that we know that it was already extended. */
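/* E.g. (target behavior hypothetical): where QImode locals are
   promoted to SImode registers, a char variable's DECL_RTL is an
   SImode reg and we hand back (subreg:QI (reg:SI N) 0) with
   SUBREG_PROMOTED_VAR_P set, so later code knows the value is
   already extended. */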
6433
6434 if (GET_CODE (DECL_RTL (exp)) == REG
6435 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6436 {
6437 /* Get the signedness used for this variable. Ensure we get the
6438 same mode we got when the variable was declared. */
6439 if (GET_MODE (DECL_RTL (exp))
6440 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6441 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6442 abort ();
6443
6444 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6445 SUBREG_PROMOTED_VAR_P (temp) = 1;
6446 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6447 return temp;
6448 }
6449
6450 return DECL_RTL (exp);
6451
6452 case INTEGER_CST:
6453 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6454 TREE_INT_CST_HIGH (exp), mode);
6455
6456 /* ??? If overflow is set, fold will have done an incomplete job,
6457 which can result in (plus xx (const_int 0)), which can get
6458 simplified by validate_replace_rtx during virtual register
6459 instantiation, which can result in unrecognizable insns.
6460 Avoid this by forcing all overflows into registers. */
6461 if (TREE_CONSTANT_OVERFLOW (exp)
6462 && modifier != EXPAND_INITIALIZER)
6463 temp = force_reg (mode, temp);
6464
6465 return temp;
6466
6467 case VECTOR_CST:
6468 return const_vector_from_tree (exp);
6469
6470 case CONST_DECL:
6471 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6472
6473 case REAL_CST:
6474 /* If optimized, generate immediate CONST_DOUBLE
6475 which will be turned into memory by reload if necessary.
6476
6477 We used to force a register so that loop.c could see it. But
6478 this does not allow gen_* patterns to perform optimizations with
6479 the constants. It also produces two insns in cases like "x = 1.0;".
6480 On most machines, floating-point constants are not permitted in
6481 many insns, so we'd end up copying it to a register in any case.
6482
6483 Now, we do the copying in expand_binop, if appropriate. */
6484 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6485 TYPE_MODE (TREE_TYPE (exp)));
6486
6487 case COMPLEX_CST:
6488 /* Handle evaluating a complex constant in a CONCAT target. */
6489 if (original_target && GET_CODE (original_target) == CONCAT)
6490 {
6491 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6492 rtx rtarg, itarg;
6493
6494 rtarg = XEXP (original_target, 0);
6495 itarg = XEXP (original_target, 1);
6496
6497 /* Move the real and imaginary parts separately. */
6498 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6499 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6500
6501 if (op0 != rtarg)
6502 emit_move_insn (rtarg, op0);
6503 if (op1 != itarg)
6504 emit_move_insn (itarg, op1);
6505
6506 return original_target;
6507 }
6508
6509 /* ... fall through ... */
6510
6511 case STRING_CST:
6512 temp = output_constant_def (exp, 1);
6513
6514 /* temp contains a constant address.
6515 On RISC machines where a constant address isn't valid,
6516 make some insns to get that address into a register. */
6517 if (modifier != EXPAND_CONST_ADDRESS
6518 && modifier != EXPAND_INITIALIZER
6519 && modifier != EXPAND_SUM
6520 && (! memory_address_p (mode, XEXP (temp, 0))
6521 || flag_force_addr))
6522 return replace_equiv_address (temp,
6523 copy_rtx (XEXP (temp, 0)));
6524 return temp;
6525
6526 case EXPR_WITH_FILE_LOCATION:
6527 {
6528 rtx to_return;
6529 struct file_stack fs;
6530
6531 fs.location = input_location;
6532 fs.next = expr_wfl_stack;
6533 input_filename = EXPR_WFL_FILENAME (exp);
6534 input_line = EXPR_WFL_LINENO (exp);
6535 expr_wfl_stack = &fs;
6536 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6537 emit_line_note (input_location);
6538 /* Possibly avoid switching back and forth here. */
6539 to_return = expand_expr (EXPR_WFL_NODE (exp),
6540 (ignore ? const0_rtx : target),
6541 tmode, modifier);
6542 if (expr_wfl_stack != &fs)
6543 abort ();
6544 input_location = fs.location;
6545 expr_wfl_stack = fs.next;
6546 return to_return;
6547 }
6548
6549 case SAVE_EXPR:
6550 context = decl_function_context (exp);
6551
6552 /* If this SAVE_EXPR was at global context, assume we are an
6553 initialization function and move it into our context. */
6554 if (context == 0)
6555 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6556
6557 /* We treat inline_function_decl as an alias for the current function
6558 because that is the inline function whose vars, types, etc.
6559 are being merged into the current function.
6560 See expand_inline_function. */
6561 if (context == current_function_decl || context == inline_function_decl)
6562 context = 0;
6563
6564 /* If this is non-local, handle it. */
6565 if (context)
6566 {
6567 /* The following call just exists to abort if the context is
6568 not that of a containing function. */
6569 find_function_data (context);
6570
6571 temp = SAVE_EXPR_RTL (exp);
6572 if (temp && GET_CODE (temp) == REG)
6573 {
6574 put_var_into_stack (exp, /*rescan=*/true);
6575 temp = SAVE_EXPR_RTL (exp);
6576 }
6577 if (temp == 0 || GET_CODE (temp) != MEM)
6578 abort ();
6579 return
6580 replace_equiv_address (temp,
6581 fix_lexical_addr (XEXP (temp, 0), exp));
6582 }
6583 if (SAVE_EXPR_RTL (exp) == 0)
6584 {
6585 if (mode == VOIDmode)
6586 temp = const0_rtx;
6587 else
6588 temp = assign_temp (build_qualified_type (type,
6589 (TYPE_QUALS (type)
6590 | TYPE_QUAL_CONST)),
6591 3, 0, 0);
6592
6593 SAVE_EXPR_RTL (exp) = temp;
6594 if (!optimize && GET_CODE (temp) == REG)
6595 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6596 save_expr_regs);
6597
6598 /* If the mode of TEMP does not match that of the expression, it
6599 must be a promoted value. We pass store_expr a SUBREG of the
6600 wanted mode but mark it so that we know that it was already
6601 extended. */
6602
6603 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6604 {
6605 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6606 promote_mode (type, mode, &unsignedp, 0);
6607 SUBREG_PROMOTED_VAR_P (temp) = 1;
6608 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6609 }
6610
6611 if (temp == const0_rtx)
6612 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6613 else
6614 store_expr (TREE_OPERAND (exp, 0), temp,
6615 modifier == EXPAND_STACK_PARM ? 2 : 0);
6616
6617 TREE_USED (exp) = 1;
6618 }
6619
6620 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6621 must be a promoted value. We return a SUBREG of the wanted mode,
6622 but mark it so that we know that it was already extended. */
6623
6624 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6625 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6626 {
6627 /* Compute the signedness and make the proper SUBREG. */
6628 promote_mode (type, mode, &unsignedp, 0);
6629 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6630 SUBREG_PROMOTED_VAR_P (temp) = 1;
6631 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6632 return temp;
6633 }
6634
6635 return SAVE_EXPR_RTL (exp);
6636
6637 case UNSAVE_EXPR:
6638 {
6639 rtx temp;
6640 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6641 TREE_OPERAND (exp, 0)
6642 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6643 return temp;
6644 }
6645
6646 case PLACEHOLDER_EXPR:
6647 {
6648 tree old_list = placeholder_list;
6649 tree placeholder_expr = 0;
6650
6651 exp = find_placeholder (exp, &placeholder_expr);
6652 if (exp == 0)
6653 abort ();
6654
6655 placeholder_list = TREE_CHAIN (placeholder_expr);
6656 temp = expand_expr (exp, original_target, tmode, modifier);
6657 placeholder_list = old_list;
6658 return temp;
6659 }
6660
6661 case WITH_RECORD_EXPR:
6662 /* Put the object on the placeholder list, expand our first operand,
6663 and pop the list. */
6664 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6665 placeholder_list);
6666 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6667 modifier);
6668 placeholder_list = TREE_CHAIN (placeholder_list);
6669 return target;
6670
6671 case GOTO_EXPR:
6672 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6673 expand_goto (TREE_OPERAND (exp, 0));
6674 else
6675 expand_computed_goto (TREE_OPERAND (exp, 0));
6676 return const0_rtx;
6677
6678 case EXIT_EXPR:
6679 expand_exit_loop_if_false (NULL,
6680 invert_truthvalue (TREE_OPERAND (exp, 0)));
6681 return const0_rtx;
6682
6683 case LABELED_BLOCK_EXPR:
6684 if (LABELED_BLOCK_BODY (exp))
6685 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6686 /* Should perhaps use expand_label, but this is simpler and safer. */
6687 do_pending_stack_adjust ();
6688 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6689 return const0_rtx;
6690
6691 case EXIT_BLOCK_EXPR:
6692 if (EXIT_BLOCK_RETURN (exp))
6693 sorry ("returned value in block_exit_expr");
6694 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6695 return const0_rtx;
6696
6697 case LOOP_EXPR:
6698 push_temp_slots ();
6699 expand_start_loop (1);
6700 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6701 expand_end_loop ();
6702 pop_temp_slots ();
6703
6704 return const0_rtx;
6705
6706 case BIND_EXPR:
6707 {
6708 tree vars = TREE_OPERAND (exp, 0);
6709
6710 /* Need to open a binding contour here because
6711 if there are any cleanups, they must be contained here. */
6712 expand_start_bindings (2);
6713
6714 /* Mark the corresponding BLOCK for output in its proper place. */
6715 if (TREE_OPERAND (exp, 2) != 0
6716 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6717 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6718
6719 /* If VARS have not yet been expanded, expand them now. */
6720 while (vars)
6721 {
6722 if (!DECL_RTL_SET_P (vars))
6723 expand_decl (vars);
6724 expand_decl_init (vars);
6725 vars = TREE_CHAIN (vars);
6726 }
6727
6728 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6729
6730 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6731
6732 return temp;
6733 }
6734
6735 case RTL_EXPR:
6736 if (RTL_EXPR_SEQUENCE (exp))
6737 {
6738 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6739 abort ();
6740 emit_insn (RTL_EXPR_SEQUENCE (exp));
6741 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6742 }
6743 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6744 free_temps_for_rtl_expr (exp);
6745 return RTL_EXPR_RTL (exp);
6746
6747 case CONSTRUCTOR:
6748 /* If we don't need the result, just ensure we evaluate any
6749 subexpressions. */
6750 if (ignore)
6751 {
6752 tree elt;
6753
6754 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6755 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6756
6757 return const0_rtx;
6758 }
6759
6760 /* All elts simple constants => refer to a constant in memory. But
6761 if this is a non-BLKmode mode, let it store a field at a time
6762 since that should make a CONST_INT or CONST_DOUBLE when we
6763 fold. Likewise, if we have a target we can use, it is best to
6764 store directly into the target unless the type is large enough
6765 that memcpy will be used. If we are making an initializer and
6766 all operands are constant, put it in memory as well.
6767
6768 FIXME: Avoid trying to fill vector constructors piecemeal.
6769 Output them with output_constant_def below unless we're sure
6770 they're zeros. This should go away when vector initializers
6771 are treated like VECTOR_CST instead of arrays.
6772 */
6773 else if ((TREE_STATIC (exp)
6774 && ((mode == BLKmode
6775 && ! (target != 0 && safe_from_p (target, exp, 1)))
6776 || TREE_ADDRESSABLE (exp)
6777 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6778 && (! MOVE_BY_PIECES_P
6779 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6780 TYPE_ALIGN (type)))
6781 && ((TREE_CODE (type) == VECTOR_TYPE
6782 && !is_zeros_p (exp))
6783 || ! mostly_zeros_p (exp)))))
6784 || ((modifier == EXPAND_INITIALIZER
6785 || modifier == EXPAND_CONST_ADDRESS)
6786 && TREE_CONSTANT (exp)))
6787 {
6788 rtx constructor = output_constant_def (exp, 1);
6789
6790 if (modifier != EXPAND_CONST_ADDRESS
6791 && modifier != EXPAND_INITIALIZER
6792 && modifier != EXPAND_SUM)
6793 constructor = validize_mem (constructor);
6794
6795 return constructor;
6796 }
6797 else
6798 {
6799 /* Handle calls that pass values in multiple non-contiguous
6800 locations. The Irix 6 ABI has examples of this. */
6801 if (target == 0 || ! safe_from_p (target, exp, 1)
6802 || GET_CODE (target) == PARALLEL
6803 || modifier == EXPAND_STACK_PARM)
6804 target
6805 = assign_temp (build_qualified_type (type,
6806 (TYPE_QUALS (type)
6807 | (TREE_READONLY (exp)
6808 * TYPE_QUAL_CONST))),
6809 0, TREE_ADDRESSABLE (exp), 1);
6810
6811 store_constructor (exp, target, 0, int_expr_size (exp));
6812 return target;
6813 }
6814
6815 case INDIRECT_REF:
6816 {
6817 tree exp1 = TREE_OPERAND (exp, 0);
6818 tree index;
6819 tree string = string_constant (exp1, &index);
6820
6821 /* Try to optimize reads from const strings. */
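/* E.g. *("abc" + 1) read in QImode folds directly to
   (const_int 98), the character 'b' (assuming ASCII). */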
6822 if (string
6823 && TREE_CODE (string) == STRING_CST
6824 && TREE_CODE (index) == INTEGER_CST
6825 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6826 && GET_MODE_CLASS (mode) == MODE_INT
6827 && GET_MODE_SIZE (mode) == 1
6828 && modifier != EXPAND_WRITE)
6829 return gen_int_mode (TREE_STRING_POINTER (string)
6830 [TREE_INT_CST_LOW (index)], mode);
6831
6832 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6833 op0 = memory_address (mode, op0);
6834 temp = gen_rtx_MEM (mode, op0);
6835 set_mem_attributes (temp, exp, 0);
6836
6837 /* If we are writing to this object and its type is a record with
6838 readonly fields, we must mark it as readonly so it will
6839 conflict with readonly references to those fields. */
6840 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6841 RTX_UNCHANGING_P (temp) = 1;
6842
6843 return temp;
6844 }
6845
6846 case ARRAY_REF:
6847 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6848 abort ();
6849
6850 {
6851 tree array = TREE_OPERAND (exp, 0);
6852 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6853 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6854 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6855 HOST_WIDE_INT i;
6856
6857 /* Optimize the special case of a zero lower bound.
6858
6859 We convert the low_bound to sizetype to avoid some problems
6860 with constant folding. (E.g. suppose the lower bound is 1,
6861 and its mode is QI. Without the conversion, (ARRAY
6862 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6863 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6864
6865 if (! integer_zerop (low_bound))
6866 index = size_diffop (index, convert (sizetype, low_bound));
6867
6868 /* Fold an expression like: "foo"[2].
6869 This is not done in fold so it won't happen inside &.
6870 Don't fold if this is for wide characters since it's too
6871 difficult to do correctly and this is a very rare case. */
6872
6873 if (modifier != EXPAND_CONST_ADDRESS
6874 && modifier != EXPAND_INITIALIZER
6875 && modifier != EXPAND_MEMORY
6876 && TREE_CODE (array) == STRING_CST
6877 && TREE_CODE (index) == INTEGER_CST
6878 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6879 && GET_MODE_CLASS (mode) == MODE_INT
6880 && GET_MODE_SIZE (mode) == 1)
6881 return gen_int_mode (TREE_STRING_POINTER (array)
6882 [TREE_INT_CST_LOW (index)], mode);
6883
6884 /* If this is a constant index into a constant array,
6885 just get the value from the array. Handle both the cases when
6886 we have an explicit constructor and when our operand is a variable
6887 that was declared const. */
6888
6889 if (modifier != EXPAND_CONST_ADDRESS
6890 && modifier != EXPAND_INITIALIZER
6891 && modifier != EXPAND_MEMORY
6892 && TREE_CODE (array) == CONSTRUCTOR
6893 && ! TREE_SIDE_EFFECTS (array)
6894 && TREE_CODE (index) == INTEGER_CST
6895 && 0 > compare_tree_int (index,
6896 list_length (CONSTRUCTOR_ELTS
6897 (TREE_OPERAND (exp, 0)))))
6898 {
6899 tree elem;
6900
6901 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6902 i = TREE_INT_CST_LOW (index);
6903 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6904 ;
6905
6906 if (elem)
6907 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6908 modifier);
6909 }
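	  /* Illustration (hypothetical source): indexing a constructor
	     directly, as in ((int[]) { 10, 20, 30 })[1], walks one
	     CONSTRUCTOR_ELTS link per unit of INDEX above and expands the
	     matching element, here the constant 20.  */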
6910
6911 else if (optimize >= 1
6912 && modifier != EXPAND_CONST_ADDRESS
6913 && modifier != EXPAND_INITIALIZER
6914 && modifier != EXPAND_MEMORY
6915 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6916 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6917 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6918 && targetm.binds_local_p (array))
6919 {
6920 if (TREE_CODE (index) == INTEGER_CST)
6921 {
6922 tree init = DECL_INITIAL (array);
6923
6924 if (TREE_CODE (init) == CONSTRUCTOR)
6925 {
6926 tree elem;
6927
6928 for (elem = CONSTRUCTOR_ELTS (init);
6929 (elem
6930 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6931 elem = TREE_CHAIN (elem))
6932 ;
6933
6934 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6935 return expand_expr (fold (TREE_VALUE (elem)), target,
6936 tmode, modifier);
6937 }
6938 else if (TREE_CODE (init) == STRING_CST
6939 && 0 > compare_tree_int (index,
6940 TREE_STRING_LENGTH (init)))
6941 {
6942 tree type = TREE_TYPE (TREE_TYPE (init));
6943 enum machine_mode mode = TYPE_MODE (type);
6944
6945 if (GET_MODE_CLASS (mode) == MODE_INT
6946 && GET_MODE_SIZE (mode) == 1)
6947 return gen_int_mode (TREE_STRING_POINTER (init)
6948 [TREE_INT_CST_LOW (index)], mode);
6949 }
6950 }
6951 }
6952 }
6953 goto normal_inner_ref;
6954
6955 case COMPONENT_REF:
6956 /* If the operand is a CONSTRUCTOR, we can just extract the
6957 appropriate field if it is present. */
6958 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6959 {
6960 tree elt;
6961
6962 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6963 elt = TREE_CHAIN (elt))
6964 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6965 /* We can normally use the value of the field in the
6966 CONSTRUCTOR. However, if this is a bitfield in
6967 an integral mode that we can fit in a HOST_WIDE_INT,
6968 we must mask only the number of bits in the bitfield,
6969 since this is done implicitly by the constructor. If
6970 the bitfield does not meet either of those conditions,
6971 we can't do this optimization. */
6972 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6973 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6974 == MODE_INT)
6975 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6976 <= HOST_BITS_PER_WIDE_INT))))
6977 {
6978 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6979 && modifier == EXPAND_STACK_PARM)
6980 target = 0;
6981 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6982 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6983 {
6984 HOST_WIDE_INT bitsize
6985 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6986 enum machine_mode imode
6987 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6988
6989 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6990 {
6991 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6992 op0 = expand_and (imode, op0, op1, target);
6993 }
6994 else
6995 {
6996 tree count
6997 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6998 0);
6999
7000 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7001 target, 0);
7002 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7003 target, 0);
7004 }
7005 }
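	      /* Illustration: for a hypothetical 3-bit signed bitfield whose
		 type is in SImode, COUNT is 32 - 3 = 29, and the
		 left-then-right shift pair sign-extends the field; an
		 unsigned field is instead masked with (1 << 3) - 1 = 7
		 above.  */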
7006
7007 return op0;
7008 }
7009 }
7010 goto normal_inner_ref;
7011
7012 case BIT_FIELD_REF:
7013 case ARRAY_RANGE_REF:
7014 normal_inner_ref:
7015 {
7016 enum machine_mode mode1;
7017 HOST_WIDE_INT bitsize, bitpos;
7018 tree offset;
7019 int volatilep = 0;
7020 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7021 &mode1, &unsignedp, &volatilep);
7022 rtx orig_op0;
7023
7024 /* If we got back the original object, something is wrong. Perhaps
7025 we are evaluating an expression too early. In any event, don't
7026 infinitely recurse. */
7027 if (tem == exp)
7028 abort ();
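	/* Illustration (hypothetical source): for s.b, where B is an int
	   field 4 bytes into the record, get_inner_reference hands back
	   TEM = s with BITSIZE = 32, BITPOS = 32 and a null OFFSET; a
	   variable reference such as a[i].b instead comes back with the
	   variable part of the position in OFFSET.  */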
7029
7030 /* If TEM's type is a union of variable size, pass TARGET to the inner
7031 	 computation, since it will need a temporary and TARGET is known
7032 	 to be suitable.  This occurs in unchecked conversion in Ada. */
7033
7034 orig_op0 = op0
7035 = expand_expr (tem,
7036 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7037 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7038 != INTEGER_CST)
7039 && modifier != EXPAND_STACK_PARM
7040 ? target : NULL_RTX),
7041 VOIDmode,
7042 (modifier == EXPAND_INITIALIZER
7043 || modifier == EXPAND_CONST_ADDRESS
7044 || modifier == EXPAND_STACK_PARM)
7045 ? modifier : EXPAND_NORMAL);
7046
7047 /* If this is a constant, put it into a register if it is a
7048 	 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7049 if (CONSTANT_P (op0))
7050 {
7051 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7052 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7053 && offset == 0)
7054 op0 = force_reg (mode, op0);
7055 else
7056 op0 = validize_mem (force_const_mem (mode, op0));
7057 }
7058
7059 	/* Otherwise, if this object is not in memory and we either have an
7060 offset or a BLKmode result, put it there. This case can't occur in
7061 C, but can in Ada if we have unchecked conversion of an expression
7062 from a scalar type to an array or record type or for an
7063 ARRAY_RANGE_REF whose type is BLKmode. */
7064 else if (GET_CODE (op0) != MEM
7065 && (offset != 0
7066 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7067 {
7068 /* If the operand is a SAVE_EXPR, we can deal with this by
7069 forcing the SAVE_EXPR into memory. */
7070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7071 {
7072 put_var_into_stack (TREE_OPERAND (exp, 0),
7073 /*rescan=*/true);
7074 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7075 }
7076 else
7077 {
7078 tree nt
7079 = build_qualified_type (TREE_TYPE (tem),
7080 (TYPE_QUALS (TREE_TYPE (tem))
7081 | TYPE_QUAL_CONST));
7082 rtx memloc = assign_temp (nt, 1, 1, 1);
7083
7084 emit_move_insn (memloc, op0);
7085 op0 = memloc;
7086 }
7087 }
7088
7089 if (offset != 0)
7090 {
7091 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7092 EXPAND_SUM);
7093
7094 if (GET_CODE (op0) != MEM)
7095 abort ();
7096
7097 #ifdef POINTERS_EXTEND_UNSIGNED
7098 if (GET_MODE (offset_rtx) != Pmode)
7099 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7100 #else
7101 if (GET_MODE (offset_rtx) != ptr_mode)
7102 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7103 #endif
7104
7105 if (GET_MODE (op0) == BLKmode
7106 		 /* A constant address in OP0 can have VOIDmode; we must
7107 not try to call force_reg in that case. */
7108 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7109 && bitsize != 0
7110 && (bitpos % bitsize) == 0
7111 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7112 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7113 {
7114 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7115 bitpos = 0;
7116 }
7117
7118 op0 = offset_address (op0, offset_rtx,
7119 highest_pow2_factor (offset));
7120 }
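	  /* Illustration: with a hypothetical BITPOS of 64 and a 32-bit
	     MODE1 whose alignment the MEM exactly matches, the fixed
	     8-byte part is folded into the address by adjust_address above
	     and BITPOS drops to zero, so offset_address handles only the
	     variable part.  */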
7121
7122 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7123 record its alignment as BIGGEST_ALIGNMENT. */
7124 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7125 && is_aligning_offset (offset, tem))
7126 set_mem_align (op0, BIGGEST_ALIGNMENT);
7127
7128 /* Don't forget about volatility even if this is a bitfield. */
7129 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7130 {
7131 if (op0 == orig_op0)
7132 op0 = copy_rtx (op0);
7133
7134 MEM_VOLATILE_P (op0) = 1;
7135 }
7136
7137 /* The following code doesn't handle CONCAT.
7138 Assume only bitpos == 0 can be used for CONCAT, due to
7139 	 one-element arrays having the same mode as their element. */
7140 if (GET_CODE (op0) == CONCAT)
7141 {
7142 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7143 abort ();
7144 return op0;
7145 }
7146
7147 /* In cases where an aligned union has an unaligned object
7148 as a field, we might be extracting a BLKmode value from
7149 an integer-mode (e.g., SImode) object. Handle this case
7150 by doing the extract into an object as wide as the field
7151 (which we know to be the width of a basic mode), then
7152 storing into memory, and changing the mode to BLKmode. */
7153 if (mode1 == VOIDmode
7154 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7155 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7156 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7157 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7158 && modifier != EXPAND_CONST_ADDRESS
7159 && modifier != EXPAND_INITIALIZER)
7160 /* If the field isn't aligned enough to fetch as a memref,
7161 fetch it as a bit field. */
7162 || (mode1 != BLKmode
7163 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7164 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7165 || (GET_CODE (op0) == MEM
7166 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7167 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7168 && ((modifier == EXPAND_CONST_ADDRESS
7169 || modifier == EXPAND_INITIALIZER)
7170 ? STRICT_ALIGNMENT
7171 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7172 || (bitpos % BITS_PER_UNIT != 0)))
7173 	    /* If the type and the field have a constant size and the
7174 	       size of the type isn't the same as that of the bitfield,
7175 	       we must use bitfield operations. */
7176 || (bitsize >= 0
7177 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7178 == INTEGER_CST)
7179 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7180 bitsize)))
7181 {
7182 enum machine_mode ext_mode = mode;
7183
7184 if (ext_mode == BLKmode
7185 && ! (target != 0 && GET_CODE (op0) == MEM
7186 && GET_CODE (target) == MEM
7187 && bitpos % BITS_PER_UNIT == 0))
7188 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7189
7190 if (ext_mode == BLKmode)
7191 {
7192 if (target == 0)
7193 target = assign_temp (type, 0, 1, 1);
7194
7195 if (bitsize == 0)
7196 return target;
7197
7198 /* In this case, BITPOS must start at a byte boundary and
7199 TARGET, if specified, must be a MEM. */
7200 if (GET_CODE (op0) != MEM
7201 || (target != 0 && GET_CODE (target) != MEM)
7202 || bitpos % BITS_PER_UNIT != 0)
7203 abort ();
7204
7205 emit_block_move (target,
7206 adjust_address (op0, VOIDmode,
7207 bitpos / BITS_PER_UNIT),
7208 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7209 / BITS_PER_UNIT),
7210 (modifier == EXPAND_STACK_PARM
7211 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7212
7213 return target;
7214 }
7215
7216 op0 = validize_mem (op0);
7217
7218 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7219 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7220
7221 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7222 (modifier == EXPAND_STACK_PARM
7223 ? NULL_RTX : target),
7224 ext_mode, ext_mode,
7225 int_size_in_bytes (TREE_TYPE (tem)));
7226
7227 /* If the result is a record type and BITSIZE is narrower than
7228 the mode of OP0, an integral mode, and this is a big endian
7229 machine, we must put the field into the high-order bits. */
7230 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7231 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7232 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7233 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7234 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7235 - bitsize),
7236 op0, 1);
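	    /* Illustration: a hypothetical 8-bit field extracted into an
	       SImode OP0 on a big-endian target is shifted left by
	       32 - 8 = 24 here, so the field lands in the high-order bits
	       as required for a record value on such a machine.  */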
7237
7238 if (mode == BLKmode)
7239 {
7240 rtx new = assign_temp (build_qualified_type
7241 ((*lang_hooks.types.type_for_mode)
7242 (ext_mode, 0),
7243 TYPE_QUAL_CONST), 0, 1, 1);
7244
7245 emit_move_insn (new, op0);
7246 op0 = copy_rtx (new);
7247 PUT_MODE (op0, BLKmode);
7248 set_mem_attributes (op0, exp, 1);
7249 }
7250
7251 return op0;
7252 }
7253
7254 /* If the result is BLKmode, use that to access the object
7255 now as well. */
7256 if (mode == BLKmode)
7257 mode1 = BLKmode;
7258
7259 /* Get a reference to just this component. */
7260 if (modifier == EXPAND_CONST_ADDRESS
7261 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7262 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7263 else
7264 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7265
7266 if (op0 == orig_op0)
7267 op0 = copy_rtx (op0);
7268
7269 set_mem_attributes (op0, exp, 0);
7270 if (GET_CODE (XEXP (op0, 0)) == REG)
7271 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7272
7273 MEM_VOLATILE_P (op0) |= volatilep;
7274 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7275 || modifier == EXPAND_CONST_ADDRESS
7276 || modifier == EXPAND_INITIALIZER)
7277 return op0;
7278 else if (target == 0)
7279 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7280
7281 convert_move (target, op0, unsignedp);
7282 return target;
7283 }
7284
7285 case VTABLE_REF:
7286 {
7287 rtx insn, before = get_last_insn (), vtbl_ref;
7288
7289 /* Evaluate the interior expression. */
7290 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7291 tmode, modifier);
7292
7293 /* Get or create an instruction off which to hang a note. */
7294 if (REG_P (subtarget))
7295 {
7296 target = subtarget;
7297 insn = get_last_insn ();
7298 if (insn == before)
7299 abort ();
7300 if (! INSN_P (insn))
7301 insn = prev_nonnote_insn (insn);
7302 }
7303 else
7304 {
7305 target = gen_reg_rtx (GET_MODE (subtarget));
7306 insn = emit_move_insn (target, subtarget);
7307 }
7308
7309 /* Collect the data for the note. */
7310 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7311 vtbl_ref = plus_constant (vtbl_ref,
7312 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7313 /* Discard the initial CONST that was added. */
7314 vtbl_ref = XEXP (vtbl_ref, 0);
7315
7316 REG_NOTES (insn)
7317 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7318
7319 return target;
7320 }
7321
7322 /* Intended for a reference to a buffer of a file-object in Pascal.
7323 But it's not certain that a special tree code will really be
7324 necessary for these. INDIRECT_REF might work for them. */
7325 case BUFFER_REF:
7326 abort ();
7327
7328 case IN_EXPR:
7329 {
7330 /* Pascal set IN expression.
7331
7332 Algorithm:
7333 rlo = set_low - (set_low%bits_per_word);
7334 the_word = set [ (index - rlo)/bits_per_word ];
7335 bit_index = index % bits_per_word;
7336 bitmask = 1 << bit_index;
7337 return !!(the_word & bitmask); */
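	/* Worked illustration (hypothetical values): with set_low = 0,
	   index = 10 and BITS_PER_UNIT = 8, the code below loads the byte
	   at offset (10 - 0) / 8 = 1 within the set and tests bit
	   10 % 8 = 2, i.e. the mask 1 << 2.  */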
7338
7339 tree set = TREE_OPERAND (exp, 0);
7340 tree index = TREE_OPERAND (exp, 1);
7341 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7342 tree set_type = TREE_TYPE (set);
7343 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7344 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7345 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7346 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7347 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7348 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7349 rtx setaddr = XEXP (setval, 0);
7350 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7351 rtx rlow;
7352 rtx diff, quo, rem, addr, bit, result;
7353
7354 /* If domain is empty, answer is no. Likewise if index is constant
7355 and out of bounds. */
7356 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7357 && TREE_CODE (set_low_bound) == INTEGER_CST
7358 && tree_int_cst_lt (set_high_bound, set_low_bound))
7359 || (TREE_CODE (index) == INTEGER_CST
7360 && TREE_CODE (set_low_bound) == INTEGER_CST
7361 && tree_int_cst_lt (index, set_low_bound))
7362 || (TREE_CODE (set_high_bound) == INTEGER_CST
7363 && TREE_CODE (index) == INTEGER_CST
7364 && tree_int_cst_lt (set_high_bound, index))))
7365 return const0_rtx;
7366
7367 if (target == 0)
7368 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7369
7370 /* If we get here, we have to generate the code for both cases
7371 (in range and out of range). */
7372
7373 op0 = gen_label_rtx ();
7374 op1 = gen_label_rtx ();
7375
7376 if (! (GET_CODE (index_val) == CONST_INT
7377 && GET_CODE (lo_r) == CONST_INT))
7378 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7379 GET_MODE (index_val), iunsignedp, op1);
7380
7381 if (! (GET_CODE (index_val) == CONST_INT
7382 && GET_CODE (hi_r) == CONST_INT))
7383 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7384 GET_MODE (index_val), iunsignedp, op1);
7385
7386 /* Calculate the element number of bit zero in the first word
7387 of the set. */
7388 if (GET_CODE (lo_r) == CONST_INT)
7389 rlow = GEN_INT (INTVAL (lo_r)
7390 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7391 else
7392 rlow = expand_binop (index_mode, and_optab, lo_r,
7393 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7394 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7395
7396 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7397 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7398
7399 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7400 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7401 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7402 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7403
7404 addr = memory_address (byte_mode,
7405 expand_binop (index_mode, add_optab, diff,
7406 setaddr, NULL_RTX, iunsignedp,
7407 OPTAB_LIB_WIDEN));
7408
7409 /* Extract the bit we want to examine. */
7410 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7411 gen_rtx_MEM (byte_mode, addr),
7412 make_tree (TREE_TYPE (index), rem),
7413 NULL_RTX, 1);
7414 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7415 GET_MODE (target) == byte_mode ? target : 0,
7416 1, OPTAB_LIB_WIDEN);
7417
7418 if (result != target)
7419 convert_move (target, result, 1);
7420
7421 /* Output the code to handle the out-of-range case. */
7422 emit_jump (op0);
7423 emit_label (op1);
7424 emit_move_insn (target, const0_rtx);
7425 emit_label (op0);
7426 return target;
7427 }
7428
7429 case WITH_CLEANUP_EXPR:
7430 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7431 {
7432 WITH_CLEANUP_EXPR_RTL (exp)
7433 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7434 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7435 CLEANUP_EH_ONLY (exp));
7436
7437 /* That's it for this cleanup. */
7438 TREE_OPERAND (exp, 1) = 0;
7439 }
7440 return WITH_CLEANUP_EXPR_RTL (exp);
7441
7442 case CLEANUP_POINT_EXPR:
7443 {
7444 /* Start a new binding layer that will keep track of all cleanup
7445 actions to be performed. */
7446 expand_start_bindings (2);
7447
7448 target_temp_slot_level = temp_slot_level;
7449
7450 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7451 /* If we're going to use this value, load it up now. */
7452 if (! ignore)
7453 op0 = force_not_mem (op0);
7454 preserve_temp_slots (op0);
7455 expand_end_bindings (NULL_TREE, 0, 0);
7456 }
7457 return op0;
7458
7459 case CALL_EXPR:
7460 /* Check for a built-in function. */
7461 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7462 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7463 == FUNCTION_DECL)
7464 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7465 {
7466 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7467 == BUILT_IN_FRONTEND)
7468 return (*lang_hooks.expand_expr) (exp, original_target,
7469 tmode, modifier);
7470 else
7471 return expand_builtin (exp, target, subtarget, tmode, ignore);
7472 }
7473
7474 return expand_call (exp, target, ignore);
7475
7476 case NON_LVALUE_EXPR:
7477 case NOP_EXPR:
7478 case CONVERT_EXPR:
7479 case REFERENCE_EXPR:
7480 if (TREE_OPERAND (exp, 0) == error_mark_node)
7481 return const0_rtx;
7482
7483 if (TREE_CODE (type) == UNION_TYPE)
7484 {
7485 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7486
7487 /* If both input and output are BLKmode, this conversion isn't doing
7488 anything except possibly changing memory attribute. */
7489 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7490 {
7491 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7492 modifier);
7493
7494 result = copy_rtx (result);
7495 set_mem_attributes (result, exp, 0);
7496 return result;
7497 }
7498
7499 if (target == 0)
7500 target = assign_temp (type, 0, 1, 1);
7501
7502 if (GET_CODE (target) == MEM)
7503 /* Store data into beginning of memory target. */
7504 store_expr (TREE_OPERAND (exp, 0),
7505 adjust_address (target, TYPE_MODE (valtype), 0),
7506 modifier == EXPAND_STACK_PARM ? 2 : 0);
7507
7508 else if (GET_CODE (target) == REG)
7509 /* Store this field into a union of the proper type. */
7510 store_field (target,
7511 MIN ((int_size_in_bytes (TREE_TYPE
7512 (TREE_OPERAND (exp, 0)))
7513 * BITS_PER_UNIT),
7514 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7515 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7516 VOIDmode, 0, type, 0);
7517 else
7518 abort ();
7519
7520 /* Return the entire union. */
7521 return target;
7522 }
7523
7524 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7525 {
7526 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7527 modifier);
7528
7529 /* If the signedness of the conversion differs and OP0 is
7530 a promoted SUBREG, clear that indication since we now
7531 have to do the proper extension. */
7532 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7533 && GET_CODE (op0) == SUBREG)
7534 SUBREG_PROMOTED_VAR_P (op0) = 0;
7535
7536 return op0;
7537 }
7538
7539 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7540 if (GET_MODE (op0) == mode)
7541 return op0;
7542
7543 /* If OP0 is a constant, just convert it into the proper mode. */
7544 if (CONSTANT_P (op0))
7545 {
7546 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7547 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7548
7549 if (modifier == EXPAND_INITIALIZER)
7550 return simplify_gen_subreg (mode, op0, inner_mode,
7551 subreg_lowpart_offset (mode,
7552 inner_mode));
7553 else
7554 return convert_modes (mode, inner_mode, op0,
7555 TREE_UNSIGNED (inner_type));
7556 }
7557
7558 if (modifier == EXPAND_INITIALIZER)
7559 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7560
7561 if (target == 0)
7562 return
7563 convert_to_mode (mode, op0,
7564 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7565 else
7566 convert_move (target, op0,
7567 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7568 return target;
7569
7570 case VIEW_CONVERT_EXPR:
7571 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7572
7573 /* If the input and output modes are both the same, we are done.
7574 Otherwise, if neither mode is BLKmode and both are integral and within
7575 a word, we can use gen_lowpart. If neither is true, make sure the
7576 operand is in memory and convert the MEM to the new mode. */
7577 if (TYPE_MODE (type) == GET_MODE (op0))
7578 ;
7579 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7580 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7581 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7582 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7583 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7584 op0 = gen_lowpart (TYPE_MODE (type), op0);
7585 else if (GET_CODE (op0) != MEM)
7586 {
7587 /* If the operand is not a MEM, force it into memory. Since we
7588 	   are going to be changing the mode of the MEM, don't call
7589 force_const_mem for constants because we don't allow pool
7590 constants to change mode. */
7591 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7592
7593 if (TREE_ADDRESSABLE (exp))
7594 abort ();
7595
7596 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7597 target
7598 = assign_stack_temp_for_type
7599 (TYPE_MODE (inner_type),
7600 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7601
7602 emit_move_insn (target, op0);
7603 op0 = target;
7604 }
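      /* Illustration: a hypothetical view-conversion of an SFmode value to
	 SImode cannot take the gen_lowpart path above, which requires two
	 integral modes; the value is spilled to a stack temporary instead
	 and re-read below with the MEM's mode changed, yielding the raw
	 bit pattern.  */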
7605
7606 /* At this point, OP0 is in the correct mode. If the output type is such
7607 that the operand is known to be aligned, indicate that it is.
7608 Otherwise, we need only be concerned about alignment for non-BLKmode
7609 results. */
7610 if (GET_CODE (op0) == MEM)
7611 {
7612 op0 = copy_rtx (op0);
7613
7614 if (TYPE_ALIGN_OK (type))
7615 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7616 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7617 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7618 {
7619 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7620 HOST_WIDE_INT temp_size
7621 = MAX (int_size_in_bytes (inner_type),
7622 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7623 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7624 temp_size, 0, type);
7625 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7626
7627 if (TREE_ADDRESSABLE (exp))
7628 abort ();
7629
7630 if (GET_MODE (op0) == BLKmode)
7631 emit_block_move (new_with_op0_mode, op0,
7632 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7633 (modifier == EXPAND_STACK_PARM
7634 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7635 else
7636 emit_move_insn (new_with_op0_mode, op0);
7637
7638 op0 = new;
7639 }
7640
7641 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7642 }
7643
7644 return op0;
7645
7646 case PLUS_EXPR:
7647 this_optab = ! unsignedp && flag_trapv
7648 && (GET_MODE_CLASS (mode) == MODE_INT)
7649 ? addv_optab : add_optab;
7650
7651 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7652 something else, make sure we add the register to the constant and
7653 then to the other thing. This case can occur during strength
7654 reduction and doing it this way will produce better code if the
7655 frame pointer or argument pointer is eliminated.
7656
7657 fold-const.c will ensure that the constant is always in the inner
7658 PLUS_EXPR, so the only case we need to do anything about is if
7659 sp, ap, or fp is our second argument, in which case we must swap
7660 the innermost first argument and our second argument. */
7661
7662 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7663 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7664 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7665 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7666 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7667 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7668 {
7669 tree t = TREE_OPERAND (exp, 1);
7670
7671 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7672 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7673 }
7674
7675 /* If the result is to be ptr_mode and we are adding an integer to
7676 something, we might be forming a constant. So try to use
7677 plus_constant. If it produces a sum and we can't accept it,
7678 use force_operand. This allows P = &ARR[const] to generate
7679 efficient code on machines where a SYMBOL_REF is not a valid
7680 address.
7681
7682 If this is an EXPAND_SUM call, always return the sum. */
7683 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7684 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7685 {
7686 if (modifier == EXPAND_STACK_PARM)
7687 target = 0;
7688 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7689 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7690 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7691 {
7692 rtx constant_part;
7693
7694 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7695 EXPAND_SUM);
7696 /* Use immed_double_const to ensure that the constant is
7697 truncated according to the mode of OP1, then sign extended
7698 to a HOST_WIDE_INT. Using the constant directly can result
7699 in non-canonical RTL in a 64x32 cross compile. */
7700 constant_part
7701 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7702 (HOST_WIDE_INT) 0,
7703 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7704 op1 = plus_constant (op1, INTVAL (constant_part));
7705 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7706 op1 = force_operand (op1, target);
7707 return op1;
7708 }
7709
7710 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7711 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7712 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7713 {
7714 rtx constant_part;
7715
7716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7717 (modifier == EXPAND_INITIALIZER
7718 ? EXPAND_INITIALIZER : EXPAND_SUM));
7719 if (! CONSTANT_P (op0))
7720 {
7721 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7722 VOIDmode, modifier);
7723 /* Return a PLUS if modifier says it's OK. */
7724 if (modifier == EXPAND_SUM
7725 || modifier == EXPAND_INITIALIZER)
7726 return simplify_gen_binary (PLUS, mode, op0, op1);
7727 goto binop2;
7728 }
7729 /* Use immed_double_const to ensure that the constant is
7730 truncated according to the mode of OP1, then sign extended
7731 to a HOST_WIDE_INT. Using the constant directly can result
7732 in non-canonical RTL in a 64x32 cross compile. */
7733 constant_part
7734 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7735 (HOST_WIDE_INT) 0,
7736 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7737 op0 = plus_constant (op0, INTVAL (constant_part));
7738 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7739 op0 = force_operand (op0, target);
7740 return op0;
7741 }
7742 }
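      /* Illustration (hypothetical source): for P = &ARR[10] with 4-byte
	 elements, plus_constant above folds the offset 40 straight into
	 the SYMBOL_REF address, and force_operand is needed only when the
	 resulting sum is not a valid operand for the target.  */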
7743
7744 /* No sense saving up arithmetic to be done
7745 if it's all in the wrong mode to form part of an address.
7746 And force_operand won't know whether to sign-extend or
7747 zero-extend. */
7748 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7749 || mode != ptr_mode)
7750 {
7751 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7752 subtarget, &op0, &op1, 0);
7753 if (op0 == const0_rtx)
7754 return op1;
7755 if (op1 == const0_rtx)
7756 return op0;
7757 goto binop2;
7758 }
7759
7760 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7761 subtarget, &op0, &op1, modifier);
7762 return simplify_gen_binary (PLUS, mode, op0, op1);
7763
7764 case MINUS_EXPR:
7765 /* For initializers, we are allowed to return a MINUS of two
7766 symbolic constants. Here we handle all cases when both operands
7767 are constant. */
7768 /* Handle difference of two symbolic constants,
7769 for the sake of an initializer. */
7770 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7771 && really_constant_p (TREE_OPERAND (exp, 0))
7772 && really_constant_p (TREE_OPERAND (exp, 1)))
7773 {
7774 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7775 NULL_RTX, &op0, &op1, modifier);
7776
7777 /* If the last operand is a CONST_INT, use plus_constant of
7778 the negated constant. Else make the MINUS. */
7779 if (GET_CODE (op1) == CONST_INT)
7780 return plus_constant (op0, - INTVAL (op1));
7781 else
7782 return gen_rtx_MINUS (mode, op0, op1);
7783 }
7784
7785 this_optab = ! unsignedp && flag_trapv
7786 && (GET_MODE_CLASS(mode) == MODE_INT)
7787 ? subv_optab : sub_optab;
7788
7789 /* No sense saving up arithmetic to be done
7790 if it's all in the wrong mode to form part of an address.
7791 And force_operand won't know whether to sign-extend or
7792 zero-extend. */
7793 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7794 || mode != ptr_mode)
7795 goto binop;
7796
7797 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7798 subtarget, &op0, &op1, modifier);
7799
7800 /* Convert A - const to A + (-const). */
7801 if (GET_CODE (op1) == CONST_INT)
7802 {
7803 op1 = negate_rtx (mode, op1);
7804 return simplify_gen_binary (PLUS, mode, op0, op1);
7805 }
7806
7807 goto binop2;
7808
7809 case MULT_EXPR:
7810 /* If first operand is constant, swap them.
7811 Thus the following special case checks need only
7812 check the second operand. */
7813 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7814 {
7815 tree t1 = TREE_OPERAND (exp, 0);
7816 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7817 TREE_OPERAND (exp, 1) = t1;
7818 }
7819
7820 /* Attempt to return something suitable for generating an
7821 indexed address, for machines that support that. */
7822
7823 if (modifier == EXPAND_SUM && mode == ptr_mode
7824 && host_integerp (TREE_OPERAND (exp, 1), 0))
7825 {
7826 tree exp1 = TREE_OPERAND (exp, 1);
7827
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7829 EXPAND_SUM);
7830
7831 if (GET_CODE (op0) != REG)
7832 op0 = force_operand (op0, NULL_RTX);
7833 if (GET_CODE (op0) != REG)
7834 op0 = copy_to_mode_reg (mode, op0);
7835
7836 return gen_rtx_MULT (mode, op0,
7837 gen_int_mode (tree_low_cst (exp1, 0),
7838 TYPE_MODE (TREE_TYPE (exp1))));
7839 }
7840
7841 if (modifier == EXPAND_STACK_PARM)
7842 target = 0;
7843
7844 /* Check for multiplying things that have been extended
7845 from a narrower type. If this machine supports multiplying
7846 in that narrower type with a result in the desired type,
7847 do it that way, and avoid the explicit type-conversion. */
7848 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7849 && TREE_CODE (type) == INTEGER_TYPE
7850 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7851 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7852 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7853 && int_fits_type_p (TREE_OPERAND (exp, 1),
7854 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7855 /* Don't use a widening multiply if a shift will do. */
7856 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7857 > HOST_BITS_PER_WIDE_INT)
7858 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7859 ||
7860 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7861 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7862 ==
7863 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7864 /* If both operands are extended, they must either both
7865 be zero-extended or both be sign-extended. */
7866 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7867 ==
7868 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7869 {
7870 enum machine_mode innermode
7871 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7872 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7873 ? smul_widen_optab : umul_widen_optab);
7874 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7875 ? umul_widen_optab : smul_widen_optab);
7876 if (mode == GET_MODE_WIDER_MODE (innermode))
7877 {
7878 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7879 {
7880 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7881 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7882 TREE_OPERAND (exp, 1),
7883 NULL_RTX, &op0, &op1, 0);
7884 else
7885 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7886 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7887 NULL_RTX, &op0, &op1, 0);
7888 goto binop2;
7889 }
7890 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7891 && innermode == word_mode)
7892 {
7893 rtx htem;
7894 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7895 NULL_RTX, VOIDmode, 0);
7896 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7897 op1 = convert_modes (innermode, mode,
7898 expand_expr (TREE_OPERAND (exp, 1),
7899 NULL_RTX, VOIDmode, 0),
7900 unsignedp);
7901 else
7902 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7903 NULL_RTX, VOIDmode, 0);
7904 temp = expand_binop (mode, other_optab, op0, op1, target,
7905 unsignedp, OPTAB_LIB_WIDEN);
7906 htem = expand_mult_highpart_adjust (innermode,
7907 gen_highpart (innermode, temp),
7908 op0, op1,
7909 gen_highpart (innermode, temp),
7910 unsignedp);
7911 emit_move_insn (gen_highpart (innermode, temp), htem);
7912 return temp;
7913 }
7914 }
7915 }
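      /* Illustration: on a target with a mulhisi3-style widening pattern,
	 a hypothetical (int) a * (int) b with A and B both HImode is
	 handled above by one widening multiply that produces the SImode
	 product directly, rather than extending both operands for a full
	 SImode multiply.  */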
7916 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7917 subtarget, &op0, &op1, 0);
7918 return expand_mult (mode, op0, op1, target, unsignedp);
7919
7920 case TRUNC_DIV_EXPR:
7921 case FLOOR_DIV_EXPR:
7922 case CEIL_DIV_EXPR:
7923 case ROUND_DIV_EXPR:
7924 case EXACT_DIV_EXPR:
7925 if (modifier == EXPAND_STACK_PARM)
7926 target = 0;
7927 /* Possible optimization: compute the dividend with EXPAND_SUM
7928 then if the divisor is constant can optimize the case
7929 where some terms of the dividend have coeffs divisible by it. */
7930 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7931 subtarget, &op0, &op1, 0);
7932 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7933
7934 case RDIV_EXPR:
7935 	/* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7936 	   saving an expensive divide.  If not, combine will rebuild the original
7937 computation. */
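      /* Illustration: with -funsafe-math-optimizations, x/y and z/y in one
	 function can each become a multiply by 1/y, and CSE may then keep
	 a single reciprocal, trading two divides for one divide and two
	 multiplies.  */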
7938 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7939 && TREE_CODE (type) == REAL_TYPE
7940 && !real_onep (TREE_OPERAND (exp, 0)))
7941 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7942 build (RDIV_EXPR, type,
7943 build_real (type, dconst1),
7944 TREE_OPERAND (exp, 1))),
7945 target, tmode, modifier);
7946 this_optab = sdiv_optab;
7947 goto binop;
7948
7949 case TRUNC_MOD_EXPR:
7950 case FLOOR_MOD_EXPR:
7951 case CEIL_MOD_EXPR:
7952 case ROUND_MOD_EXPR:
7953 if (modifier == EXPAND_STACK_PARM)
7954 target = 0;
7955 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7956 subtarget, &op0, &op1, 0);
7957 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7958
7959 case FIX_ROUND_EXPR:
7960 case FIX_FLOOR_EXPR:
7961 case FIX_CEIL_EXPR:
7962 abort (); /* Not used for C. */
7963
7964 case FIX_TRUNC_EXPR:
7965 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7966 if (target == 0 || modifier == EXPAND_STACK_PARM)
7967 target = gen_reg_rtx (mode);
7968 expand_fix (target, op0, unsignedp);
7969 return target;
7970
7971 case FLOAT_EXPR:
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7973 if (target == 0 || modifier == EXPAND_STACK_PARM)
7974 target = gen_reg_rtx (mode);
7975 /* expand_float can't figure out what to do if FROM has VOIDmode.
7976 So give it the correct mode. With -O, cse will optimize this. */
7977 if (GET_MODE (op0) == VOIDmode)
7978 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7979 op0);
7980 expand_float (target, op0,
7981 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7982 return target;
7983
7984 case NEGATE_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7986 if (modifier == EXPAND_STACK_PARM)
7987 target = 0;
7988 temp = expand_unop (mode,
7989 ! unsignedp && flag_trapv
7990 && (GET_MODE_CLASS(mode) == MODE_INT)
7991 ? negv_optab : neg_optab, op0, target, 0);
7992 if (temp == 0)
7993 abort ();
7994 return temp;
7995
7996 case ABS_EXPR:
7997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7998 if (modifier == EXPAND_STACK_PARM)
7999 target = 0;
8000
8001 /* ABS_EXPR is not valid for complex arguments. */
8002 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8003 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8004 abort ();
8005
8006 /* Unsigned abs is simply the operand. Testing here means we don't
8007 risk generating incorrect code below. */
8008 if (TREE_UNSIGNED (type))
8009 return op0;
8010
8011 return expand_abs (mode, op0, target, unsignedp,
8012 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8013
8014 case MAX_EXPR:
8015 case MIN_EXPR:
8016 target = original_target;
8017 if (target == 0
8018 || modifier == EXPAND_STACK_PARM
8019 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8020 || GET_MODE (target) != mode
8021 || (GET_CODE (target) == REG
8022 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8023 target = gen_reg_rtx (mode);
8024 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8025 target, &op0, &op1, 0);
8026
8027 /* First try to do it with a special MIN or MAX instruction.
8028 If that does not win, use a conditional jump to select the proper
8029 value. */
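      /* Illustration: a hypothetical signed SImode MAX_EXPR tries
	 smax_optab first; when no such instruction exists, the fallback
	 below copies OP0 into TARGET and branches around "TARGET = OP1"
	 whenever TARGET >= OP1 already holds.  */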
8030 this_optab = (TREE_UNSIGNED (type)
8031 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8032 : (code == MIN_EXPR ? smin_optab : smax_optab));
8033
8034 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8035 OPTAB_WIDEN);
8036 if (temp != 0)
8037 return temp;
8038
8039 /* At this point, a MEM target is no longer useful; we will get better
8040 code without it. */
8041
8042 if (GET_CODE (target) == MEM)
8043 target = gen_reg_rtx (mode);
8044
8045 /* If op1 was placed in target, swap op0 and op1. */
8046 if (target != op0 && target == op1)
8047 {
8048 rtx tem = op0;
8049 op0 = op1;
8050 op1 = tem;
8051 }
8052
8053 if (target != op0)
8054 emit_move_insn (target, op0);
8055
8056 op0 = gen_label_rtx ();
8057
8058 /* If this mode is an integer too wide to compare properly,
8059 compare word by word. Rely on cse to optimize constant cases. */
8060 if (GET_MODE_CLASS (mode) == MODE_INT
8061 && ! can_compare_p (GE, mode, ccp_jump))
8062 {
8063 if (code == MAX_EXPR)
8064 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8065 target, op1, NULL_RTX, op0);
8066 else
8067 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8068 op1, target, NULL_RTX, op0);
8069 }
8070 else
8071 {
8072 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8073 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8074 unsignedp, mode, NULL_RTX, NULL_RTX,
8075 op0);
8076 }
8077 emit_move_insn (target, op1);
8078 emit_label (op0);
8079 return target;
8080
8081 case BIT_NOT_EXPR:
8082 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8083 if (modifier == EXPAND_STACK_PARM)
8084 target = 0;
8085 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8086 if (temp == 0)
8087 abort ();
8088 return temp;
8089
8090 /* ??? Can optimize bitwise operations with one arg constant.
8091 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8092 and (a bitwise1 b) bitwise2 b (etc)
8093 	 but that is probably not worthwhile. */
8094
8095 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8096 boolean values when we want in all cases to compute both of them. In
8097 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8098 as actual zero-or-1 values and then bitwise anding. In cases where
8099 there cannot be any side effects, better code would be made by
8100 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8101 how to recognize those cases. */
8102
8103 case TRUTH_AND_EXPR:
8104 case BIT_AND_EXPR:
8105 this_optab = and_optab;
8106 goto binop;
8107
8108 case TRUTH_OR_EXPR:
8109 case BIT_IOR_EXPR:
8110 this_optab = ior_optab;
8111 goto binop;
8112
8113 case TRUTH_XOR_EXPR:
8114 case BIT_XOR_EXPR:
8115 this_optab = xor_optab;
8116 goto binop;
8117
8118 case LSHIFT_EXPR:
8119 case RSHIFT_EXPR:
8120 case LROTATE_EXPR:
8121 case RROTATE_EXPR:
8122 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8123 subtarget = 0;
8124 if (modifier == EXPAND_STACK_PARM)
8125 target = 0;
8126 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8127 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8128 unsignedp);
8129
8130 /* Could determine the answer when only additive constants differ. Also,
8131 the addition of one can be handled by changing the condition. */
8132 case LT_EXPR:
8133 case LE_EXPR:
8134 case GT_EXPR:
8135 case GE_EXPR:
8136 case EQ_EXPR:
8137 case NE_EXPR:
8138 case UNORDERED_EXPR:
8139 case ORDERED_EXPR:
8140 case UNLT_EXPR:
8141 case UNLE_EXPR:
8142 case UNGT_EXPR:
8143 case UNGE_EXPR:
8144 case UNEQ_EXPR:
8145 temp = do_store_flag (exp,
8146 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8147 tmode != VOIDmode ? tmode : mode, 0);
8148 if (temp != 0)
8149 return temp;
8150
8151 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8152 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8153 && original_target
8154 && GET_CODE (original_target) == REG
8155 && (GET_MODE (original_target)
8156 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8157 {
8158 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8159 VOIDmode, 0);
8160
8161 /* If temp is constant, we can just compute the result. */
8162 if (GET_CODE (temp) == CONST_INT)
8163 {
8164 if (INTVAL (temp) != 0)
8165 emit_move_insn (target, const1_rtx);
8166 else
8167 emit_move_insn (target, const0_rtx);
8168
8169 return target;
8170 }
8171
8172 if (temp != original_target)
8173 {
8174 enum machine_mode mode1 = GET_MODE (temp);
8175 if (mode1 == VOIDmode)
8176 mode1 = tmode != VOIDmode ? tmode : mode;
8177
8178 temp = copy_to_mode_reg (mode1, temp);
8179 }
8180
8181 op1 = gen_label_rtx ();
8182 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8183 GET_MODE (temp), unsignedp, op1);
8184 emit_move_insn (temp, const1_rtx);
8185 emit_label (op1);
8186 return temp;
8187 }
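      /* Illustration: for a hypothetical "foo != 0" whose value is wanted
	 in the caller-supplied register, the sequence above loads FOO into
	 it, jumps past the move of const1_rtx when FOO compares equal to
	 zero, and so needs no separate store-flag instruction.  */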
8188
8189 /* If no set-flag instruction, must generate a conditional
8190 store into a temporary variable. Drop through
8191 and handle this like && and ||. */
8192
8193 case TRUTH_ANDIF_EXPR:
8194 case TRUTH_ORIF_EXPR:
8195 if (! ignore
8196 && (target == 0
8197 || modifier == EXPAND_STACK_PARM
8198 || ! safe_from_p (target, exp, 1)
8199 /* Make sure we don't have a hard reg (such as function's return
8200 value) live across basic blocks, if not optimizing. */
8201 || (!optimize && GET_CODE (target) == REG
8202 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8203 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8204
8205 if (target)
8206 emit_clr_insn (target);
8207
8208 op1 = gen_label_rtx ();
8209 jumpifnot (exp, op1);
8210
8211 if (target)
8212 emit_0_to_1_insn (target);
8213
8214 emit_label (op1);
8215 return ignore ? const0_rtx : target;
8216
8217 case TRUTH_NOT_EXPR:
8218 if (modifier == EXPAND_STACK_PARM)
8219 target = 0;
8220 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8221 /* The parser is careful to generate TRUTH_NOT_EXPR
8222 only with operands that are always zero or one. */
8223 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8224 target, 1, OPTAB_LIB_WIDEN);
8225 if (temp == 0)
8226 abort ();
8227 return temp;
8228
8229 case COMPOUND_EXPR:
8230 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8231 emit_queue ();
8232 return expand_expr (TREE_OPERAND (exp, 1),
8233 (ignore ? const0_rtx : target),
8234 VOIDmode, modifier);
8235
8236 case COND_EXPR:
8237 /* If we would have a "singleton" (see below) were it not for a
8238 conversion in each arm, bring that conversion back out. */
8239 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8240 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8241 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8242 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8243 {
8244 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8245 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8246
8247 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8248 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8249 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8250 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8251 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8252 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8253 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8254 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8255 return expand_expr (build1 (NOP_EXPR, type,
8256 build (COND_EXPR, TREE_TYPE (iftrue),
8257 TREE_OPERAND (exp, 0),
8258 iftrue, iffalse)),
8259 target, tmode, modifier);
8260 }
8261
8262 {
8263 /* Note that COND_EXPRs whose type is a structure or union
8264 are required to be constructed to contain assignments of
8265 a temporary variable, so that we can evaluate them here
8266 for side effect only. If type is void, we must do likewise. */
8267
8268 /* If an arm of the branch requires a cleanup,
8269 only that cleanup is performed. */
8270
8271 tree singleton = 0;
8272 tree binary_op = 0, unary_op = 0;
8273
8274 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8275 convert it to our mode, if necessary. */
8276 if (integer_onep (TREE_OPERAND (exp, 1))
8277 && integer_zerop (TREE_OPERAND (exp, 2))
8278 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8279 {
8280 if (ignore)
8281 {
8282 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8283 modifier);
8284 return const0_rtx;
8285 }
8286
8287 if (modifier == EXPAND_STACK_PARM)
8288 target = 0;
8289 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8290 if (GET_MODE (op0) == mode)
8291 return op0;
8292
8293 if (target == 0)
8294 target = gen_reg_rtx (mode);
8295 convert_move (target, op0, unsignedp);
8296 return target;
8297 }
8298
8299 /* Check for X ? A + B : A. If we have this, we can copy A to the
8300 output and conditionally add B. Similarly for unary operations.
8301 Don't do this if X has side-effects because those side effects
8302 might affect A or B and the "?" operation is a sequence point in
8303 ANSI. (operand_equal_p tests for side effects.) */
8304
8305 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8306 && operand_equal_p (TREE_OPERAND (exp, 2),
8307 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8308 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8309 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8310 && operand_equal_p (TREE_OPERAND (exp, 1),
8311 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8312 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8313 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8314 && operand_equal_p (TREE_OPERAND (exp, 2),
8315 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8316 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8317 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8318 && operand_equal_p (TREE_OPERAND (exp, 1),
8319 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8320 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
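	/* Illustration: in a hypothetical x ? a + b : a, SINGLETON becomes
	   A and BINARY_OP the PLUS_EXPR, so A can be copied to the output
	   unconditionally and B added only when X is true; this is safe
	   precisely because operand_equal_p has ruled out side effects.  */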
8321
8322 /* If we are not to produce a result, we have no target. Otherwise,
8323 if a target was specified use it; it will not be used as an
8324 intermediate target unless it is safe. If no target, use a
8325 temporary. */
8326
8327 if (ignore)
8328 temp = 0;
8329 else if (modifier == EXPAND_STACK_PARM)
8330 temp = assign_temp (type, 0, 0, 1);
8331 else if (original_target
8332 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8333 || (singleton && GET_CODE (original_target) == REG
8334 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8335 && original_target == var_rtx (singleton)))
8336 && GET_MODE (original_target) == mode
8337 #ifdef HAVE_conditional_move
8338 && (! can_conditionally_move_p (mode)
8339 || GET_CODE (original_target) == REG
8340 || TREE_ADDRESSABLE (type))
8341 #endif
8342 && (GET_CODE (original_target) != MEM
8343 || TREE_ADDRESSABLE (type)))
8344 temp = original_target;
8345 else if (TREE_ADDRESSABLE (type))
8346 abort ();
8347 else
8348 temp = assign_temp (type, 0, 0, 1);
8349
8350 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8351 do the test of X as a store-flag operation, do this as
8352 A + ((X != 0) << log C). Similarly for other simple binary
8353 operators. Only do for C == 1 if BRANCH_COST is low. */
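	/* E.g. a hypothetical flag ? base + 8 : base becomes
	   base + ((flag != 0) << 3): do_store_flag materializes the 0/1
	   value of FLAG, and the shift by log2 (8) = 3 replaces the
	   branch.  */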
8354 if (temp && singleton && binary_op
8355 && (TREE_CODE (binary_op) == PLUS_EXPR
8356 || TREE_CODE (binary_op) == MINUS_EXPR
8357 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8358 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8359 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8360 : integer_onep (TREE_OPERAND (binary_op, 1)))
8361 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8362 {
8363 rtx result;
8364 tree cond;
8365 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8366 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8367 ? addv_optab : add_optab)
8368 : TREE_CODE (binary_op) == MINUS_EXPR
8369 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8370 ? subv_optab : sub_optab)
8371 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8372 : xor_optab);
8373
8374 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8375 if (singleton == TREE_OPERAND (exp, 1))
8376 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8377 else
8378 cond = TREE_OPERAND (exp, 0);
8379
8380 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8381 ? temp : NULL_RTX),
8382 mode, BRANCH_COST <= 1);
8383
8384 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8385 result = expand_shift (LSHIFT_EXPR, mode, result,
8386 build_int_2 (tree_log2
8387 (TREE_OPERAND
8388 (binary_op, 1)),
8389 0),
8390 (safe_from_p (temp, singleton, 1)
8391 ? temp : NULL_RTX), 0);
8392
8393 if (result)
8394 {
8395 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8396 return expand_binop (mode, boptab, op1, result, temp,
8397 unsignedp, OPTAB_LIB_WIDEN);
8398 }
8399 }
8400
8401 do_pending_stack_adjust ();
8402 NO_DEFER_POP;
8403 op0 = gen_label_rtx ();
8404
8405 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8406 {
8407 if (temp != 0)
8408 {
8409 /* If the target conflicts with the other operand of the
8410 binary op, we can't use it. Also, we can't use the target
8411 if it is a hard register, because evaluating the condition
8412 might clobber it. */
8413 if ((binary_op
8414 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8415 || (GET_CODE (temp) == REG
8416 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8417 temp = gen_reg_rtx (mode);
8418 store_expr (singleton, temp,
8419 modifier == EXPAND_STACK_PARM ? 2 : 0);
8420 }
8421 else
8422 expand_expr (singleton,
8423 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8424 if (singleton == TREE_OPERAND (exp, 1))
8425 jumpif (TREE_OPERAND (exp, 0), op0);
8426 else
8427 jumpifnot (TREE_OPERAND (exp, 0), op0);
8428
8429 start_cleanup_deferral ();
8430 if (binary_op && temp == 0)
8431 /* Just touch the other operand. */
8432 expand_expr (TREE_OPERAND (binary_op, 1),
8433 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8434 else if (binary_op)
8435 store_expr (build (TREE_CODE (binary_op), type,
8436 make_tree (type, temp),
8437 TREE_OPERAND (binary_op, 1)),
8438 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8439 else
8440 store_expr (build1 (TREE_CODE (unary_op), type,
8441 make_tree (type, temp)),
8442 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8443 op1 = op0;
8444 }
8445 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8446 comparison operator. If we have one of these cases, set the
8447 output to A, branch on A (cse will merge these two references),
8448 then set the output to FOO. */
8449 else if (temp
8450 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8451 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8453 TREE_OPERAND (exp, 1), 0)
8454 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8455 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8456 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8457 {
8458 if (GET_CODE (temp) == REG
8459 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8460 temp = gen_reg_rtx (mode);
8461 store_expr (TREE_OPERAND (exp, 1), temp,
8462 modifier == EXPAND_STACK_PARM ? 2 : 0);
8463 jumpif (TREE_OPERAND (exp, 0), op0);
8464
8465 start_cleanup_deferral ();
8466 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8467 store_expr (TREE_OPERAND (exp, 2), temp,
8468 modifier == EXPAND_STACK_PARM ? 2 : 0);
8469 else
8470 expand_expr (TREE_OPERAND (exp, 2),
8471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8472 op1 = op0;
8473 }
8474 else if (temp
8475 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8476 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8477 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8478 TREE_OPERAND (exp, 2), 0)
8479 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8480 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8481 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8482 {
8483 if (GET_CODE (temp) == REG
8484 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8485 temp = gen_reg_rtx (mode);
8486 store_expr (TREE_OPERAND (exp, 2), temp,
8487 modifier == EXPAND_STACK_PARM ? 2 : 0);
8488 jumpifnot (TREE_OPERAND (exp, 0), op0);
8489
8490 start_cleanup_deferral ();
8491 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8492 store_expr (TREE_OPERAND (exp, 1), temp,
8493 modifier == EXPAND_STACK_PARM ? 2 : 0);
8494 else
8495 expand_expr (TREE_OPERAND (exp, 1),
8496 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8497 op1 = op0;
8498 }
8499 else
8500 {
8501 op1 = gen_label_rtx ();
8502 jumpifnot (TREE_OPERAND (exp, 0), op0);
8503
8504 start_cleanup_deferral ();
8505
8506 /* One branch of the cond can be void, if it never returns. For
8507 example A ? throw : E */
8508 if (temp != 0
8509 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8510 store_expr (TREE_OPERAND (exp, 1), temp,
8511 modifier == EXPAND_STACK_PARM ? 2 : 0);
8512 else
8513 expand_expr (TREE_OPERAND (exp, 1),
8514 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8515 end_cleanup_deferral ();
8516 emit_queue ();
8517 emit_jump_insn (gen_jump (op1));
8518 emit_barrier ();
8519 emit_label (op0);
8520 start_cleanup_deferral ();
8521 if (temp != 0
8522 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8523 store_expr (TREE_OPERAND (exp, 2), temp,
8524 modifier == EXPAND_STACK_PARM ? 2 : 0);
8525 else
8526 expand_expr (TREE_OPERAND (exp, 2),
8527 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8528 }
8529
8530 end_cleanup_deferral ();
8531
8532 emit_queue ();
8533 emit_label (op1);
8534 OK_DEFER_POP;
8535
8536 return temp;
8537 }
8538
8539 case TARGET_EXPR:
8540 {
8541 /* Something needs to be initialized, but we didn't know
8542 where that thing was when building the tree. For example,
8543 it could be the return value of a function, or a parameter
8544 to a function that is laid out on the stack, or a temporary
8545 variable which must be passed by reference.
8546
8547 We guarantee that the expression will either be constructed
8548 or copied into our original target. */
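/* For example, a C++ initialization such as "S s = f ();" can reach
   here as a TARGET_EXPR whose slot stands for s, allowing f () to
   construct its result in place. */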
8549
8550 tree slot = TREE_OPERAND (exp, 0);
8551 tree cleanups = NULL_TREE;
8552 tree exp1;
8553
8554 if (TREE_CODE (slot) != VAR_DECL)
8555 abort ();
8556
8557 if (! ignore)
8558 target = original_target;
8559
8560 /* Set this here so that if we get a target that refers to a
8561 register variable that's already been used, put_reg_into_stack
8562 knows that it should fix up those uses. */
8563 TREE_USED (slot) = 1;
8564
8565 if (target == 0)
8566 {
8567 if (DECL_RTL_SET_P (slot))
8568 {
8569 target = DECL_RTL (slot);
8570 /* If we have already expanded the slot, don't do
8571 it again. (mrs) */
8572 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8573 return target;
8574 }
8575 else
8576 {
8577 target = assign_temp (type, 2, 0, 1);
8578 /* All temp slots at this level must not conflict. */
8579 preserve_temp_slots (target);
8580 SET_DECL_RTL (slot, target);
8581 if (TREE_ADDRESSABLE (slot))
8582 put_var_into_stack (slot, /*rescan=*/false);
8583
8584 /* Since SLOT is not known to the called function
8585 to belong to its stack frame, we must build an explicit
8586 cleanup. This case occurs when we must build up a reference
8587 to pass as an argument. In this case,
8588 it is very likely that such a reference need not be
8589 built here. */
8590
8591 if (TREE_OPERAND (exp, 2) == 0)
8592 TREE_OPERAND (exp, 2)
8593 = (*lang_hooks.maybe_build_cleanup) (slot);
8594 cleanups = TREE_OPERAND (exp, 2);
8595 }
8596 }
8597 else
8598 {
8599 /* This case does occur when expanding a parameter that
8600 needs to be constructed on the stack. The target
8601 is the actual stack address that we want to initialize.
8602 The function we call will perform the cleanup in this case. */
8603
8604 /* If we have already assigned it space, use that space,
8605 not the target we were passed in, since our target
8606 parameter is only a hint. */
8607 if (DECL_RTL_SET_P (slot))
8608 {
8609 target = DECL_RTL (slot);
8610 /* If we have already expanded the slot, don't do
8611 it again. (mrs) */
8612 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8613 return target;
8614 }
8615 else
8616 {
8617 SET_DECL_RTL (slot, target);
8618 /* If we must have an addressable slot, then make sure that
8619 the RTL that we just stored in slot is OK. */
8620 if (TREE_ADDRESSABLE (slot))
8621 put_var_into_stack (slot, /*rescan=*/true);
8622 }
8623 }
8624
8625 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8626 /* Mark it as expanded. */
8627 TREE_OPERAND (exp, 1) = NULL_TREE;
8628
8629 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8630
8631 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8632
8633 return target;
8634 }
8635
8636 case INIT_EXPR:
8637 {
8638 tree lhs = TREE_OPERAND (exp, 0);
8639 tree rhs = TREE_OPERAND (exp, 1);
8640
8641 temp = expand_assignment (lhs, rhs, ! ignore);
8642 return temp;
8643 }
8644
8645 case MODIFY_EXPR:
8646 {
8647 /* If lhs is complex, expand calls in rhs before computing it.
8648 That's so we don't compute a pointer and save it over a
8649 call. If lhs is simple, compute it first so we can give it
8650 as a target if the rhs is just a call. This avoids an
8651 extra temp and copy and that prevents a partial-subsumption
8652 which makes bad code. Actually we could treat
8653 component_ref's of vars like vars. */
8654
8655 tree lhs = TREE_OPERAND (exp, 0);
8656 tree rhs = TREE_OPERAND (exp, 1);
8657
8658 temp = 0;
8659
8660 /* Check for |= or &= of a bitfield of size one into another bitfield
8661 of size 1. In this case, (unless we need the result of the
8662 assignment) we can do this more efficiently with a
8663 test followed by an assignment, if necessary.
8664
8665 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8666 things change so we do, this code should be enhanced to
8667 support it. */
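/* Concretely, for one-bit fields this turns, e.g., "s.a |= t.b" into
   "if (t.b) s.a = 1;" and "s.a &= t.b" into "if (! t.b) s.a = 0;". */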
8668 if (ignore
8669 && TREE_CODE (lhs) == COMPONENT_REF
8670 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8671 || TREE_CODE (rhs) == BIT_AND_EXPR)
8672 && TREE_OPERAND (rhs, 0) == lhs
8673 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8674 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8675 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8676 {
8677 rtx label = gen_label_rtx ();
8678
8679 do_jump (TREE_OPERAND (rhs, 1),
8680 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8681 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8682 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8683 (TREE_CODE (rhs) == BIT_IOR_EXPR
8684 ? integer_one_node
8685 : integer_zero_node)),
8686 0);
8687 do_pending_stack_adjust ();
8688 emit_label (label);
8689 return const0_rtx;
8690 }
8691
8692 temp = expand_assignment (lhs, rhs, ! ignore);
8693
8694 return temp;
8695 }
8696
8697 case RETURN_EXPR:
8698 if (!TREE_OPERAND (exp, 0))
8699 expand_null_return ();
8700 else
8701 expand_return (TREE_OPERAND (exp, 0));
8702 return const0_rtx;
8703
8704 case PREINCREMENT_EXPR:
8705 case PREDECREMENT_EXPR:
8706 return expand_increment (exp, 0, ignore);
8707
8708 case POSTINCREMENT_EXPR:
8709 case POSTDECREMENT_EXPR:
8710 /* Faster to treat as pre-increment if result is not used. */
8711 return expand_increment (exp, ! ignore, ignore);
8712
8713 case ADDR_EXPR:
8714 if (modifier == EXPAND_STACK_PARM)
8715 target = 0;
8716 /* Are we taking the address of a nested function? */
8717 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8718 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8719 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8720 && ! TREE_STATIC (exp))
8721 {
8722 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8723 op0 = force_operand (op0, target);
8724 }
8725 /* If we are taking the address of something erroneous, just
8726 return a zero. */
8727 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8728 return const0_rtx;
8729 /* If we are taking the address of a constant and are at the
8730 top level, we have to use output_constant_def since we can't
8731 call force_const_mem at top level. */
8732 else if (cfun == 0
8733 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8734 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8735 == 'c')))
8736 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8737 else
8738 {
8739 /* We make sure to pass const0_rtx down if we came in with
8740 ignore set, to avoid doing the cleanups twice. */
8741 op0 = expand_expr (TREE_OPERAND (exp, 0),
8742 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8743 (modifier == EXPAND_INITIALIZER
8744 ? modifier : EXPAND_CONST_ADDRESS));
8745
8746 /* If we are going to ignore the result, OP0 will have been set
8747 to const0_rtx, so just return it. Don't get confused and
8748 think we are taking the address of the constant. */
8749 if (ignore)
8750 return op0;
8751
8752 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8753 clever and return a REG when given a MEM. */
8754 op0 = protect_from_queue (op0, 1);
8755
8756 /* We would like the object in memory. If it is a constant, we can
8757 have it be statically allocated into memory. For a non-constant,
8758 we need to allocate some memory and store the value into it. */
8759
8760 if (CONSTANT_P (op0))
8761 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8762 op0);
8763 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8764 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8765 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8766 {
8767 /* If the operand is a SAVE_EXPR, we can deal with this by
8768 forcing the SAVE_EXPR into memory. */
8769 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8770 {
8771 put_var_into_stack (TREE_OPERAND (exp, 0),
8772 /*rescan=*/true);
8773 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8774 }
8775 else
8776 {
8777 /* If this object is in a register, it can't be BLKmode. */
8778 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8779 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8780
8781 if (GET_CODE (op0) == PARALLEL)
8782 /* Handle calls that pass values in multiple
8783 non-contiguous locations. The Irix 6 ABI has examples
8784 of this. */
8785 emit_group_store (memloc, op0, inner_type,
8786 int_size_in_bytes (inner_type));
8787 else
8788 emit_move_insn (memloc, op0);
8789
8790 op0 = memloc;
8791 }
8792 }
8793
8794 if (GET_CODE (op0) != MEM)
8795 abort ();
8796
8797 mark_temp_addr_taken (op0);
8798 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8799 {
8800 op0 = XEXP (op0, 0);
8801 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8802 op0 = convert_memory_address (ptr_mode, op0);
8803 return op0;
8804 }
8805
8806 /* If OP0 is not aligned at least as much as the type requires, we
8807 need to make a temporary, copy OP0 to it, and take the address of
8808 the temporary. We want to use the alignment of the type, not of
8809 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8810 the test for BLKmode means that can't happen. The test for
8811 BLKmode is because we never make mis-aligned MEMs with
8812 non-BLKmode.
8813
8814 We don't need to do this at all if the machine doesn't have
8815 strict alignment. */
8816 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8817 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8818 > MEM_ALIGN (op0))
8819 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8820 {
8821 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8822 rtx new;
8823
8824 if (TYPE_ALIGN_OK (inner_type))
8825 abort ();
8826
8827 if (TREE_ADDRESSABLE (inner_type))
8828 {
8829 /* We can't make a bitwise copy of this object, so fail. */
8830 error ("cannot take the address of an unaligned member");
8831 return const0_rtx;
8832 }
8833
8834 new = assign_stack_temp_for_type
8835 (TYPE_MODE (inner_type),
8836 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8837 : int_size_in_bytes (inner_type),
8838 1, build_qualified_type (inner_type,
8839 (TYPE_QUALS (inner_type)
8840 | TYPE_QUAL_CONST)));
8841
8842 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8843 (modifier == EXPAND_STACK_PARM
8844 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8845
8846 op0 = new;
8847 }
8848
8849 op0 = force_operand (XEXP (op0, 0), target);
8850 }
8851
8852 if (flag_force_addr
8853 && GET_CODE (op0) != REG
8854 && modifier != EXPAND_CONST_ADDRESS
8855 && modifier != EXPAND_INITIALIZER
8856 && modifier != EXPAND_SUM)
8857 op0 = force_reg (Pmode, op0);
8858
8859 if (GET_CODE (op0) == REG
8860 && ! REG_USERVAR_P (op0))
8861 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8862
8863 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8864 op0 = convert_memory_address (ptr_mode, op0);
8865
8866 return op0;
8867
8868 case ENTRY_VALUE_EXPR:
8869 abort ();
8870
8871 /* COMPLEX type for Extended Pascal & Fortran */
8872 case COMPLEX_EXPR:
8873 {
8874 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8875 rtx insns;
8876
8877 /* Get the rtx for the operands. */
8878 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8879 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8880
8881 if (! target)
8882 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8883
8884 start_sequence ();
8885
8886 /* Move the real (op0) and imaginary (op1) parts to their location. */
8887 emit_move_insn (gen_realpart (mode, target), op0);
8888 emit_move_insn (gen_imagpart (mode, target), op1);
8889
8890 insns = get_insns ();
8891 end_sequence ();
8892
8893 /* Complex construction should appear as a single unit. */
8894 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8895 each with a separate pseudo as destination.
8896 It's not correct for flow to treat them as a unit. */
8897 if (GET_CODE (target) != CONCAT)
8898 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8899 else
8900 emit_insn (insns);
8901
8902 return target;
8903 }
8904
8905 case REALPART_EXPR:
8906 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8907 return gen_realpart (mode, op0);
8908
8909 case IMAGPART_EXPR:
8910 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8911 return gen_imagpart (mode, op0);
8912
8913 case CONJ_EXPR:
8914 {
8915 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8916 rtx imag_t;
8917 rtx insns;
8918
8919 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8920
8921 if (! target)
8922 target = gen_reg_rtx (mode);
8923
8924 start_sequence ();
8925
8926 /* Store the realpart and the negated imagpart to target. */
8927 emit_move_insn (gen_realpart (partmode, target),
8928 gen_realpart (partmode, op0));
8929
8930 imag_t = gen_imagpart (partmode, target);
8931 temp = expand_unop (partmode,
8932 ! unsignedp && flag_trapv
8933 && (GET_MODE_CLASS(partmode) == MODE_INT)
8934 ? negv_optab : neg_optab,
8935 gen_imagpart (partmode, op0), imag_t, 0);
8936 if (temp != imag_t)
8937 emit_move_insn (imag_t, temp);
8938
8939 insns = get_insns ();
8940 end_sequence ();
8941
8942 /* Conjugate should appear as a single unit.
8943 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8944 each with a separate pseudo as destination.
8945 It's not correct for flow to treat them as a unit. */
8946 if (GET_CODE (target) != CONCAT)
8947 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8948 else
8949 emit_insn (insns);
8950
8951 return target;
8952 }
8953
8954 case TRY_CATCH_EXPR:
8955 {
8956 tree handler = TREE_OPERAND (exp, 1);
8957
8958 expand_eh_region_start ();
8959
8960 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8961
8962 expand_eh_region_end_cleanup (handler);
8963
8964 return op0;
8965 }
8966
8967 case TRY_FINALLY_EXPR:
8968 {
8969 tree try_block = TREE_OPERAND (exp, 0);
8970 tree finally_block = TREE_OPERAND (exp, 1);
8971
8972 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8973 {
8974 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8975 is not sufficient, so we cannot expand the block twice.
8976 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
8977 expand the thing only once. */
8978 /* When not optimizing, we go ahead with this form since
8979 (1) user breakpoints operate more predictably without
8980 code duplication, and
8981 (2) we're not running any of the global optimizers
8982 that would explode in time/space with the highly
8983 connected CFG created by the indirect branching. */
8984
8985 rtx finally_label = gen_label_rtx ();
8986 rtx done_label = gen_label_rtx ();
8987 rtx return_link = gen_reg_rtx (Pmode);
8988 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8989 (tree) finally_label, (tree) return_link);
8990 TREE_SIDE_EFFECTS (cleanup) = 1;
8991
8992 /* Start a new binding layer that will keep track of all cleanup
8993 actions to be performed. */
8994 expand_start_bindings (2);
8995 target_temp_slot_level = temp_slot_level;
8996
8997 expand_decl_cleanup (NULL_TREE, cleanup);
8998 op0 = expand_expr (try_block, target, tmode, modifier);
8999
9000 preserve_temp_slots (op0);
9001 expand_end_bindings (NULL_TREE, 0, 0);
9002 emit_jump (done_label);
9003 emit_label (finally_label);
9004 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9005 emit_indirect_jump (return_link);
9006 emit_label (done_label);
9007 }
9008 else
9009 {
9010 expand_start_bindings (2);
9011 target_temp_slot_level = temp_slot_level;
9012
9013 expand_decl_cleanup (NULL_TREE, finally_block);
9014 op0 = expand_expr (try_block, target, tmode, modifier);
9015
9016 preserve_temp_slots (op0);
9017 expand_end_bindings (NULL_TREE, 0, 0);
9018 }
9019
9020 return op0;
9021 }
9022
9023 case GOTO_SUBROUTINE_EXPR:
9024 {
9025 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9026 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9027 rtx return_address = gen_label_rtx ();
9028 emit_move_insn (return_link,
9029 gen_rtx_LABEL_REF (Pmode, return_address));
9030 emit_jump (subr);
9031 emit_label (return_address);
9032 return const0_rtx;
9033 }
9034
9035 case VA_ARG_EXPR:
9036 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9037
9038 case EXC_PTR_EXPR:
9039 return get_exception_pointer (cfun);
9040
9041 case FDESC_EXPR:
9042 /* Function descriptors are not valid except as
9043 initialization constants, and should not be expanded. */
9044 abort ();
9045
9046 default:
9047 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9048 }
9049
9050 /* Here to do an ordinary binary operator, generating an instruction
9051 from the optab already placed in `this_optab'. */
9052 binop:
9053 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9054 subtarget, &op0, &op1, 0);
9055 binop2:
9056 if (modifier == EXPAND_STACK_PARM)
9057 target = 0;
9058 temp = expand_binop (mode, this_optab, op0, op1, target,
9059 unsignedp, OPTAB_LIB_WIDEN);
9060 if (temp == 0)
9061 abort ();
9062 return temp;
9063 }
9064 \f
9065 /* Subroutine of above: return 1 if OFFSET corresponds to an offset that,
9066 when applied to the address of EXP, produces an address known to be
9067 aligned to more than BIGGEST_ALIGNMENT. */
9068
9069 static int
9070 is_aligning_offset (tree offset, tree exp)
9071 {
9072 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9073 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9074 || TREE_CODE (offset) == NOP_EXPR
9075 || TREE_CODE (offset) == CONVERT_EXPR
9076 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9077 offset = TREE_OPERAND (offset, 0);
9078
9079 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9080 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
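/* That is, OFFSET should look like (- &EXP) & MASK, where MASK is one
   less than a power of 2; adding such an offset rounds the address of
   EXP up to a multiple of MASK + 1. */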
9081 if (TREE_CODE (offset) != BIT_AND_EXPR
9082 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9083 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9084 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9085 return 0;
9086
9087 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9088 It must be NEGATE_EXPR. Then strip any more conversions. */
9089 offset = TREE_OPERAND (offset, 0);
9090 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9091 || TREE_CODE (offset) == NOP_EXPR
9092 || TREE_CODE (offset) == CONVERT_EXPR)
9093 offset = TREE_OPERAND (offset, 0);
9094
9095 if (TREE_CODE (offset) != NEGATE_EXPR)
9096 return 0;
9097
9098 offset = TREE_OPERAND (offset, 0);
9099 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9100 || TREE_CODE (offset) == NOP_EXPR
9101 || TREE_CODE (offset) == CONVERT_EXPR)
9102 offset = TREE_OPERAND (offset, 0);
9103
9104 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9105 whose type is the same as EXP. */
9106 return (TREE_CODE (offset) == ADDR_EXPR
9107 && (TREE_OPERAND (offset, 0) == exp
9108 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9109 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9110 == TREE_TYPE (exp)))));
9111 }
9112 \f
9113 /* Return the tree node if ARG corresponds to a string constant, or zero
9114 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9115 in bytes within the string that ARG is accessing. The type of the
9116 offset will be `sizetype'. */
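/* For example, for an ARG of the form "str" + 2, i.e. a PLUS_EXPR of
   an ADDR_EXPR of a STRING_CST and a constant, we return the STRING_CST
   and set *PTR_OFFSET to 2. */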
9117
9118 tree
9119 string_constant (tree arg, tree *ptr_offset)
9120 {
9121 STRIP_NOPS (arg);
9122
9123 if (TREE_CODE (arg) == ADDR_EXPR
9124 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9125 {
9126 *ptr_offset = size_zero_node;
9127 return TREE_OPERAND (arg, 0);
9128 }
9129 else if (TREE_CODE (arg) == PLUS_EXPR)
9130 {
9131 tree arg0 = TREE_OPERAND (arg, 0);
9132 tree arg1 = TREE_OPERAND (arg, 1);
9133
9134 STRIP_NOPS (arg0);
9135 STRIP_NOPS (arg1);
9136
9137 if (TREE_CODE (arg0) == ADDR_EXPR
9138 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9139 {
9140 *ptr_offset = convert (sizetype, arg1);
9141 return TREE_OPERAND (arg0, 0);
9142 }
9143 else if (TREE_CODE (arg1) == ADDR_EXPR
9144 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9145 {
9146 *ptr_offset = convert (sizetype, arg0);
9147 return TREE_OPERAND (arg1, 0);
9148 }
9149 }
9150
9151 return 0;
9152 }
9153 \f
9154 /* Expand code for a post- or pre- increment or decrement
9155 and return the RTX for the result.
9156 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9157
9158 static rtx
9159 expand_increment (tree exp, int post, int ignore)
9160 {
9161 rtx op0, op1;
9162 rtx temp, value;
9163 tree incremented = TREE_OPERAND (exp, 0);
9164 optab this_optab = add_optab;
9165 int icode;
9166 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9167 int op0_is_copy = 0;
9168 int single_insn = 0;
9169 /* 1 means we can't store into OP0 directly,
9170 because it is a subreg narrower than a word,
9171 and we don't dare clobber the rest of the word. */
9172 int bad_subreg = 0;
9173
9174 /* Stabilize any component ref that might need to be
9175 evaluated more than once below. */
9176 if (!post
9177 || TREE_CODE (incremented) == BIT_FIELD_REF
9178 || (TREE_CODE (incremented) == COMPONENT_REF
9179 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9180 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9181 incremented = stabilize_reference (incremented);
9182 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9183 ones into save exprs so that they don't accidentally get evaluated
9184 more than once by the code below. */
9185 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9186 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9187 incremented = save_expr (incremented);
9188
9189 /* Compute the operands as RTX.
9190 Note whether OP0 is the actual lvalue or a copy of it:
9191 I believe it is a copy iff it is a register or subreg
9192 and insns were generated in computing it. */
9193
9194 temp = get_last_insn ();
9195 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9196
9197 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9198 in place but instead must do sign- or zero-extension during assignment,
9199 so we copy it into a new register and let the code below use it as
9200 a copy.
9201
9202 Note that we can safely modify this SUBREG since it is known not to be
9203 shared (it was made by the expand_expr call above). */
9204
9205 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9206 {
9207 if (post)
9208 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9209 else
9210 bad_subreg = 1;
9211 }
9212 else if (GET_CODE (op0) == SUBREG
9213 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9214 {
9215 /* We cannot increment this SUBREG in place. If we are
9216 post-incrementing, get a copy of the old value. Otherwise,
9217 just mark that we cannot increment in place. */
9218 if (post)
9219 op0 = copy_to_reg (op0);
9220 else
9221 bad_subreg = 1;
9222 }
9223
9224 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9225 && temp != get_last_insn ());
9226 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9227
9228 /* Decide whether incrementing or decrementing. */
9229 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9230 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9231 this_optab = sub_optab;
9232
9233 /* Convert decrement by a constant into a negative increment. */
9234 if (this_optab == sub_optab
9235 && GET_CODE (op1) == CONST_INT)
9236 {
9237 op1 = GEN_INT (-INTVAL (op1));
9238 this_optab = add_optab;
9239 }
9240
9241 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9242 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9243
9244 /* For a preincrement, see if we can do this with a single instruction. */
9245 if (!post)
9246 {
9247 icode = (int) this_optab->handlers[(int) mode].insn_code;
9248 if (icode != (int) CODE_FOR_nothing
9249 /* Make sure that OP0 is valid for operands 0 and 1
9250 of the insn we want to queue. */
9251 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9252 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9253 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9254 single_insn = 1;
9255 }
9256
9257 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9258 then we cannot just increment OP0. We must therefore contrive to
9259 increment the original value. Then, for postincrement, we can return
9260 OP0 since it is a copy of the old value. For preincrement, expand here
9261 unless we can do it with a single insn.
9262
9263 Likewise if storing directly into OP0 would clobber high bits
9264 we need to preserve (bad_subreg). */
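/* E.g. a "++x" that cannot be done in place is simply expanded here
   as the equivalent assignment "x = x + 1". */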
9265 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9266 {
9267 /* This is the easiest way to increment the value wherever it is.
9268 Problems with multiple evaluation of INCREMENTED are prevented
9269 because either (1) it is a component_ref or preincrement,
9270 in which case it was stabilized above, or (2) it is an array_ref
9271 with constant index in an array in a register, which is
9272 safe to reevaluate. */
9273 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9274 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9275 ? MINUS_EXPR : PLUS_EXPR),
9276 TREE_TYPE (exp),
9277 incremented,
9278 TREE_OPERAND (exp, 1));
9279
9280 while (TREE_CODE (incremented) == NOP_EXPR
9281 || TREE_CODE (incremented) == CONVERT_EXPR)
9282 {
9283 newexp = convert (TREE_TYPE (incremented), newexp);
9284 incremented = TREE_OPERAND (incremented, 0);
9285 }
9286
9287 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9288 return post ? op0 : temp;
9289 }
9290
9291 if (post)
9292 {
9293 /* We have a true reference to the value in OP0.
9294 If there is an insn to add or subtract in this mode, queue it.
9295 Queuing the increment insn avoids the register shuffling
9296 that often results if we must increment now and first save
9297 the old value for subsequent use. */
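/* E.g. for "y = x++" (names illustrative), the queued add lets us hand
   out the old value of x directly instead of copying it first. */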
9298
9299 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9300 op0 = stabilize (op0);
9301 #endif
9302
9303 icode = (int) this_optab->handlers[(int) mode].insn_code;
9304 if (icode != (int) CODE_FOR_nothing
9305 /* Make sure that OP0 is valid for operands 0 and 1
9306 of the insn we want to queue. */
9307 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9308 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9309 {
9310 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9311 op1 = force_reg (mode, op1);
9312
9313 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9314 }
9315 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9316 {
9317 rtx addr = (general_operand (XEXP (op0, 0), mode)
9318 ? force_reg (Pmode, XEXP (op0, 0))
9319 : copy_to_reg (XEXP (op0, 0)));
9320 rtx temp, result;
9321
9322 op0 = replace_equiv_address (op0, addr);
9323 temp = force_reg (GET_MODE (op0), op0);
9324 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9325 op1 = force_reg (mode, op1);
9326
9327 /* The increment queue is LIFO, thus we have to `queue'
9328 the instructions in reverse order. */
9329 enqueue_insn (op0, gen_move_insn (op0, temp));
9330 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9331 return result;
9332 }
9333 }
9334
9335 /* Preincrement, or we can't increment with one simple insn. */
9336 if (post)
9337 /* Save a copy of the value before inc or dec, to return it later. */
9338 temp = value = copy_to_reg (op0);
9339 else
9340 /* Arrange to return the incremented value. */
9341 /* Copy the rtx because expand_binop will protect from the queue,
9342 and the results of that would be invalid for us to return
9343 if our caller does emit_queue before using our result. */
9344 temp = copy_rtx (value = op0);
9345
9346 /* Increment however we can. */
9347 op1 = expand_binop (mode, this_optab, value, op1, op0,
9348 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9349
9350 /* Make sure the value is stored into OP0. */
9351 if (op1 != op0)
9352 emit_move_insn (op0, op1);
9353
9354 return temp;
9355 }
9356 \f
9357 /* Generate code to calculate EXP using a store-flag instruction
9358 and return an rtx for the result. EXP is either a comparison
9359 or a TRUTH_NOT_EXPR whose operand is a comparison.
9360
9361 If TARGET is nonzero, store the result there if convenient.
9362
9363 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9364 cheap.
9365
9366 Return zero if there is no suitable set-flag instruction
9367 available on this machine.
9368
9369 Once expand_expr has been called on the arguments of the comparison,
9370 we are committed to doing the store flag, since it is not safe to
9371 re-evaluate the expression. We emit the store-flag insn by calling
9372 emit_store_flag, but only expand the arguments if we have a reason
9373 to believe that emit_store_flag will be successful. If we think that
9374 it will, but it isn't, we have to simulate the store-flag with a
9375 set/jump/set sequence. */
9376
9377 static rtx
9378 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9379 {
9380 enum rtx_code code;
9381 tree arg0, arg1, type;
9382 tree tem;
9383 enum machine_mode operand_mode;
9384 int invert = 0;
9385 int unsignedp;
9386 rtx op0, op1;
9387 enum insn_code icode;
9388 rtx subtarget = target;
9389 rtx result, label;
9390
9391 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9392 result at the end. We can't simply invert the test since it would
9393 have already been inverted if it were valid. This case occurs for
9394 some floating-point comparisons. */
9395
9396 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9397 invert = 1, exp = TREE_OPERAND (exp, 0);
9398
9399 arg0 = TREE_OPERAND (exp, 0);
9400 arg1 = TREE_OPERAND (exp, 1);
9401
9402 /* Don't crash if the comparison was erroneous. */
9403 if (arg0 == error_mark_node || arg1 == error_mark_node)
9404 return const0_rtx;
9405
9406 type = TREE_TYPE (arg0);
9407 operand_mode = TYPE_MODE (type);
9408 unsignedp = TREE_UNSIGNED (type);
9409
9410 /* We won't bother with BLKmode store-flag operations because it would mean
9411 passing a lot of information to emit_store_flag. */
9412 if (operand_mode == BLKmode)
9413 return 0;
9414
9415 /* We won't bother with store-flag operations involving function pointers
9416 when function pointers must be canonicalized before comparisons. */
9417 #ifdef HAVE_canonicalize_funcptr_for_compare
9418 if (HAVE_canonicalize_funcptr_for_compare
9419 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9420 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9421 == FUNCTION_TYPE))
9422 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9423 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9424 == FUNCTION_TYPE))))
9425 return 0;
9426 #endif
9427
9428 STRIP_NOPS (arg0);
9429 STRIP_NOPS (arg1);
9430
9431 /* Get the rtx comparison code to use. We know that EXP is a comparison
9432 operation of some type. Some comparisons against 1 and -1 can be
9433 converted to comparisons with zero. Do so here so that the tests
9434 below will be aware that we have a comparison with zero. These
9435 tests will not catch constants in the first operand, but constants
9436 are rarely passed as the first operand. */
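/* E.g. "x < 1" becomes "x <= 0", "x >= 1" becomes "x > 0", and a
   signed "x <= -1" becomes "x < 0". */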
9437
9438 switch (TREE_CODE (exp))
9439 {
9440 case EQ_EXPR:
9441 code = EQ;
9442 break;
9443 case NE_EXPR:
9444 code = NE;
9445 break;
9446 case LT_EXPR:
9447 if (integer_onep (arg1))
9448 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9449 else
9450 code = unsignedp ? LTU : LT;
9451 break;
9452 case LE_EXPR:
9453 if (! unsignedp && integer_all_onesp (arg1))
9454 arg1 = integer_zero_node, code = LT;
9455 else
9456 code = unsignedp ? LEU : LE;
9457 break;
9458 case GT_EXPR:
9459 if (! unsignedp && integer_all_onesp (arg1))
9460 arg1 = integer_zero_node, code = GE;
9461 else
9462 code = unsignedp ? GTU : GT;
9463 break;
9464 case GE_EXPR:
9465 if (integer_onep (arg1))
9466 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9467 else
9468 code = unsignedp ? GEU : GE;
9469 break;
9470
9471 case UNORDERED_EXPR:
9472 code = UNORDERED;
9473 break;
9474 case ORDERED_EXPR:
9475 code = ORDERED;
9476 break;
9477 case UNLT_EXPR:
9478 code = UNLT;
9479 break;
9480 case UNLE_EXPR:
9481 code = UNLE;
9482 break;
9483 case UNGT_EXPR:
9484 code = UNGT;
9485 break;
9486 case UNGE_EXPR:
9487 code = UNGE;
9488 break;
9489 case UNEQ_EXPR:
9490 code = UNEQ;
9491 break;
9492
9493 default:
9494 abort ();
9495 }
9496
9497 /* Put a constant second. */
9498 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9499 {
9500 tem = arg0; arg0 = arg1; arg1 = tem;
9501 code = swap_condition (code);
9502 }
9503
9504 /* If this is an equality or inequality test of a single bit, we can
9505 do this by shifting the bit being tested to the low-order bit and
9506 masking the result with the constant 1. If the condition was EQ,
9507 we xor it with 1. This does not require an scc insn and is faster
9508 than an scc insn even if we have it.
9509
9510 The code to make this transformation was moved into fold_single_bit_test,
9511 so we just call into the folder and expand its result. */
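/* E.g. "(x & 4) != 0" becomes "(x >> 2) & 1", and the EQ form is then
   xored with 1. */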
9512
9513 if ((code == NE || code == EQ)
9514 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9515 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9516 {
9517 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9518 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9519 arg0, arg1, type),
9520 target, VOIDmode, EXPAND_NORMAL);
9521 }
9522
9523 /* Now see if we are likely to be able to do this. Return if not. */
9524 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9525 return 0;
9526
9527 icode = setcc_gen_code[(int) code];
9528 if (icode == CODE_FOR_nothing
9529 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9530 {
9531 /* We can only do this if it is one of the special cases that
9532 can be handled without an scc insn. */
9533 if ((code == LT && integer_zerop (arg1))
9534 || (! only_cheap && code == GE && integer_zerop (arg1)))
9535 ;
9536 else if (BRANCH_COST >= 0
9537 && ! only_cheap && (code == NE || code == EQ)
9538 && TREE_CODE (type) != REAL_TYPE
9539 && ((abs_optab->handlers[(int) operand_mode].insn_code
9540 != CODE_FOR_nothing)
9541 || (ffs_optab->handlers[(int) operand_mode].insn_code
9542 != CODE_FOR_nothing)))
9543 ;
9544 else
9545 return 0;
9546 }
9547
9548 if (! get_subtarget (target)
9549 || GET_MODE (subtarget) != operand_mode)
9550 subtarget = 0;
9551
9552 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9553
9554 if (target == 0)
9555 target = gen_reg_rtx (mode);
9556
9557 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9558 because, if emit_store_flag does anything, it will succeed and
9559 OP0 and OP1 will not be used subsequently. */
9560
9561 result = emit_store_flag (target, code,
9562 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9563 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9564 operand_mode, unsignedp, 1);
9565
9566 if (result)
9567 {
9568 if (invert)
9569 result = expand_binop (mode, xor_optab, result, const1_rtx,
9570 result, 0, OPTAB_LIB_WIDEN);
9571 return result;
9572 }
9573
9574 /* If this failed, we have to do this with set/compare/jump/set code. */
9575 if (GET_CODE (target) != REG
9576 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9577 target = gen_reg_rtx (GET_MODE (target));
9578
9579 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9580 result = compare_from_rtx (op0, op1, code, unsignedp,
9581 operand_mode, NULL_RTX);
9582 if (GET_CODE (result) == CONST_INT)
9583 return (((result == const0_rtx && ! invert)
9584 || (result != const0_rtx && invert))
9585 ? const0_rtx : const1_rtx);
9586
9587 /* The code of RESULT may not match CODE if compare_from_rtx
9588 decided to swap its operands and reverse the original code.
9589
9590 We know that compare_from_rtx returns either a CONST_INT or
9591 a new comparison code, so it is safe to just extract the
9592 code from RESULT. */
9593 code = GET_CODE (result);
9594
9595 label = gen_label_rtx ();
9596 if (bcc_gen_fctn[(int) code] == 0)
9597 abort ();
9598
9599 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9600 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9601 emit_label (label);
9602
9603 return target;
9604 }
9605 \f
9606
9607 /* Stubs in case we haven't got a casesi insn. */
9608 #ifndef HAVE_casesi
9609 # define HAVE_casesi 0
9610 # define gen_casesi(a, b, c, d, e) (0)
9611 # define CODE_FOR_casesi CODE_FOR_nothing
9612 #endif
9613
9614 /* If the machine does not have a case insn that compares the bounds,
9615 this means extra overhead for dispatch tables, which raises the
9616 threshold for using them. */
9617 #ifndef CASE_VALUES_THRESHOLD
9618 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9619 #endif /* CASE_VALUES_THRESHOLD */
9620
9621 unsigned int
9622 case_values_threshold (void)
9623 {
9624 return CASE_VALUES_THRESHOLD;
9625 }
9626
9627 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9628 0 otherwise (i.e. if there is no casesi instruction). */
9629 int
9630 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9631 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9632 {
9633 enum machine_mode index_mode = SImode;
9634 int index_bits = GET_MODE_BITSIZE (index_mode);
9635 rtx op1, op2, index;
9636 enum machine_mode op_mode;
9637
9638 if (! HAVE_casesi)
9639 return 0;
9640
9641 /* Convert the index to SImode. */
9642 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9643 {
9644 enum machine_mode omode = TYPE_MODE (index_type);
9645 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9646
9647 /* We must handle the endpoints in the original mode. */
9648 index_expr = build (MINUS_EXPR, index_type,
9649 index_expr, minval);
9650 minval = integer_zero_node;
9651 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9652 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9653 omode, 1, default_label);
9654 /* Now we can safely truncate. */
9655 index = convert_to_mode (index_mode, index, 0);
9656 }
9657 else
9658 {
9659 if (TYPE_MODE (index_type) != index_mode)
9660 {
9661 index_expr = convert ((*lang_hooks.types.type_for_size)
9662 (index_bits, 0), index_expr);
9663 index_type = TREE_TYPE (index_expr);
9664 }
9665
9666 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9667 }
9668 emit_queue ();
9669 index = protect_from_queue (index, 0);
9670 do_pending_stack_adjust ();
9671
9672 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9673 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9674 (index, op_mode))
9675 index = copy_to_mode_reg (op_mode, index);
9676
9677 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9678
9679 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9680 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9681 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9682 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9683 (op1, op_mode))
9684 op1 = copy_to_mode_reg (op_mode, op1);
9685
9686 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9687
9688 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9689 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9690 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9691 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9692 (op2, op_mode))
9693 op2 = copy_to_mode_reg (op_mode, op2);
9694
9695 emit_jump_insn (gen_casesi (index, op1, op2,
9696 table_label, default_label));
9697 return 1;
9698 }
9699
9700 /* Attempt to generate a tablejump instruction; same concept. */
9701 #ifndef HAVE_tablejump
9702 #define HAVE_tablejump 0
9703 #define gen_tablejump(x, y) (0)
9704 #endif
9705
9706 /* Subroutine of the next function.
9707
9708 INDEX is the value being switched on, with the lowest value
9709 in the table already subtracted.
9710 MODE is its expected mode (needed if INDEX is constant).
9711 RANGE is the length of the jump table.
9712 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9713
9714 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9715 index value is out of range. */
9716
9717 static void
9718 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9719 rtx default_label)
9720 {
9721 rtx temp, vector;
9722
9723 if (INTVAL (range) > cfun->max_jumptable_ents)
9724 cfun->max_jumptable_ents = INTVAL (range);
9725
9726 /* Do an unsigned comparison (in the proper mode) between the index
9727 expression and the value which represents the length of the range.
9728 Since we just finished subtracting the lower bound of the range
9729 from the index expression, this comparison allows us to simultaneously
9730 check that the original index expression value is both greater than
9731 or equal to the minimum value of the range and less than or equal to
9732 the maximum value of the range. */
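/* E.g. for case values 10 .. 14, INDEX has already had 10 subtracted;
   an original value of 5 wraps around to a huge unsigned number, so the
   single unsigned comparison against RANGE = 4 rejects it as well. */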
9733
9734 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9735 default_label);
9736
9737 /* If index is in range, it must fit in Pmode.
9738 Convert to Pmode so we can index with it. */
9739 if (mode != Pmode)
9740 index = convert_to_mode (Pmode, index, 1);
9741
9742 /* Don't let a MEM slip through, because then INDEX that comes
9743 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9744 and break_out_memory_refs will go to work on it and mess it up. */
9745 #ifdef PIC_CASE_VECTOR_ADDRESS
9746 if (flag_pic && GET_CODE (index) != REG)
9747 index = copy_to_mode_reg (Pmode, index);
9748 #endif
9749
9750 /* If flag_force_addr were to affect this address
9751 it could interfere with the tricky assumptions made
9752 about addresses that contain label-refs,
9753 which may be valid only very near the tablejump itself. */
9754 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9755 GET_MODE_SIZE, because this indicates how large insns are. The other
9756 uses should all be Pmode, because they are addresses. This code
9757 could fail if addresses and insns are not the same size. */
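/* The entry address is TABLE_LABEL + INDEX * (size of one table entry). */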
9758 index = gen_rtx_PLUS (Pmode,
9759 gen_rtx_MULT (Pmode, index,
9760 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9761 gen_rtx_LABEL_REF (Pmode, table_label));
9762 #ifdef PIC_CASE_VECTOR_ADDRESS
9763 if (flag_pic)
9764 index = PIC_CASE_VECTOR_ADDRESS (index);
9765 else
9766 #endif
9767 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9768 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9769 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9770 RTX_UNCHANGING_P (vector) = 1;
9771 MEM_NOTRAP_P (vector) = 1;
9772 convert_move (temp, vector, 0);
9773
9774 emit_jump_insn (gen_tablejump (temp, table_label));
9775
9776 /* If we are generating PIC code or if the table is PC-relative, the
9777 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9778 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9779 emit_barrier ();
9780 }
9781
9782 int
9783 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9784 rtx table_label, rtx default_label)
9785 {
9786 rtx index;
9787
9788 if (! HAVE_tablejump)
9789 return 0;
9790
9791 index_expr = fold (build (MINUS_EXPR, index_type,
9792 convert (index_type, index_expr),
9793 convert (index_type, minval)));
9794 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9795 emit_queue ();
9796 index = protect_from_queue (index, 0);
9797 do_pending_stack_adjust ();
9798
9799 do_tablejump (index, TYPE_MODE (index_type),
9800 convert_modes (TYPE_MODE (index_type),
9801 TYPE_MODE (TREE_TYPE (range)),
9802 expand_expr (range, NULL_RTX,
9803 VOIDmode, 0),
9804 TREE_UNSIGNED (TREE_TYPE (range))),
9805 table_label, default_label);
9806 return 1;
9807 }
9808
9809 /* Nonzero if the mode is a valid vector mode for this architecture.
9810 This returns nonzero even if there is no hardware support for the
9811 vector mode, but we can emulate with narrower modes. */
9812
9813 int
9814 vector_mode_valid_p (enum machine_mode mode)
9815 {
9816 enum mode_class class = GET_MODE_CLASS (mode);
9817 enum machine_mode innermode;
9818
9819 /* Doh! What's going on? */
9820 if (class != MODE_VECTOR_INT
9821 && class != MODE_VECTOR_FLOAT)
9822 return 0;
9823
9824 /* Hardware support. Woo hoo! */
9825 if (VECTOR_MODE_SUPPORTED_P (mode))
9826 return 1;
9827
9828 innermode = GET_MODE_INNER (mode);
9829
9830 /* We should probably return 1 if requesting V4DI and we have no DI
9831 but do have V2DI; however, this case is probably very unlikely. */
9832
9833 /* If we have support for the inner mode, we can safely emulate it.
9834 We may not have V2DI, but we can emulate with a pair of DIs. */
9835 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9836 }
9837
9838 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9839 static rtx
9840 const_vector_from_tree (tree exp)
9841 {
9842 rtvec v;
9843 int units, i;
9844 tree link, elt;
9845 enum machine_mode inner, mode;
9846
9847 mode = TYPE_MODE (TREE_TYPE (exp));
9848
9849 if (is_zeros_p (exp))
9850 return CONST0_RTX (mode);
9851
9852 units = GET_MODE_NUNITS (mode);
9853 inner = GET_MODE_INNER (mode);
9854
9855 v = rtvec_alloc (units);
9856
9857 link = TREE_VECTOR_CST_ELTS (exp);
9858 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9859 {
9860 elt = TREE_VALUE (link);
9861
9862 if (TREE_CODE (elt) == REAL_CST)
9863 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9864 inner);
9865 else
9866 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9867 TREE_INT_CST_HIGH (elt),
9868 inner);
9869 }
9870
9871 /* Initialize remaining elements to 0. */
9872 for (; i < units; ++i)
9873 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9874
9875 return gen_rtx_raw_CONST_VECTOR (mode, v);
9876 }
9877
9878 #include "gt-expr.h"