/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store
   (or clear) to be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
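
/* Illustration (not part of the original source): store_by_pieces
   callers supply CONSTFUN, which must return the constant chunk to
   store at byte OFFSET in mode MODE.  A sketch of a callback that
   always stores zeros, in the style of clear_by_pieces_1 below:

     static rtx
     zero_bytes (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode ATTRIBUTE_UNUSED)
     {
       return const0_rtx;
     }

   const0_rtx is usable in any integer mode, which is why a single
   callback suffices for clearing.  */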
121
35cb5232 122static rtx enqueue_insn (rtx, rtx);
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int);
125static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127static bool block_move_libcall_safe_for_call_parm (void);
128static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
129static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130static tree emit_block_move_libcall_fn (int);
131static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
138static rtx clear_storage_via_libcall (rtx, rtx);
139static tree clear_storage_libcall_fn (int);
140static rtx compress_float_constant (rtx, rtx);
141static rtx get_subtarget (rtx);
35cb5232 142static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
148static rtx var_rtx (tree);
149
150static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
252d0e4d 151static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
35cb5232 152
153static int is_aligning_offset (tree, tree);
154static rtx expand_increment (tree, int, int);
33204670 155static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
35cb5232 157static rtx do_store_flag (tree, rtx, enum machine_mode, int);
fad4a30c 158#ifdef PUSH_ROUNDING
35cb5232 159static void emit_single_push_insn (enum machine_mode, rtx, tree);
fad4a30c 160#endif
35cb5232 161static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
162static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
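
/* Illustration (not from the original source): on a hypothetical
   32-bit target with word-aligned operands, an 8-byte copy needs two
   SImode moves, so move_by_pieces_ninsns returns 2.  With a
   MOVE_RATIO of 2 (a common choice on targets with movstr patterns)
   the test 2 < 2 fails and the copy is not done by pieces, while a
   target defining MOVE_RATIO >= 3 would inline it.  The same
   arithmetic drives CLEAR_BY_PIECES_P and STORE_BY_PIECES_P below.  */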

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
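
/* Illustration (not from the original source): the tables filled in
   above are consulted when deciding whether a conversion can simply
   refer to a memory operand in a narrower mode.  For instance,
   convert_move below tests

     MEM_P (from) && ! MEM_VOLATILE_P (from)
     && direct_load[(int) to_mode]
     && ! mode_dependent_address_p (XEXP (from, 0))

   before using gen_lowpart on a MEM, falling back to force_reg when
   the mode cannot be loaded directly.  */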

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
323\f
324/* Manage the queue of increment instructions to be output
325 for POSTINCREMENT_EXPR expressions, etc. */
326
10f307d9 327/* Queue up to increment (or change) VAR later. BODY says how:
328 BODY should be the same thing you would pass to emit_insn
329 to increment right away. It will go to emit_insn later on.
330
331 The value is a QUEUED expression to be used in place of VAR
332 where you want to guarantee the pre-incrementation value of VAR. */
333
334static rtx
35cb5232 335enqueue_insn (rtx var, rtx body)
10f307d9 336{
7014838c 337 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
338 body, pending_chain);
10f307d9 339 return pending_chain;
340}
341
342/* Use protect_from_queue to convert a QUEUED expression
343 into something that you can put immediately into an instruction.
344 If the queued incrementation has not happened yet,
345 protect_from_queue returns the variable itself.
346 If the incrementation has happened, protect_from_queue returns a temp
347 that contains a copy of the old value of the variable.
348
349 Any time an rtx which might possibly be a QUEUED is to be put
350 into an instruction, it must be passed through protect_from_queue first.
351 QUEUED expressions are not meaningful in instructions.
352
353 Do not pass a value through protect_from_queue and then hold
354 on to it for a while before putting it in an instruction!
355 If the queue is flushed in between, incorrect code will result. */
356
357rtx
35cb5232 358protect_from_queue (rtx x, int modify)
10f307d9 359{
19cb6b50 360 RTX_CODE code = GET_CODE (x);
10f307d9 361
362#if 0 /* A QUEUED can hang around after the queue is forced out. */
363 /* Shortcut for most common case. */
364 if (pending_chain == 0)
365 return x;
366#endif
367
368 if (code != QUEUED)
369 {
2f6a905f 370 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
371 use of autoincrement. Make a copy of the contents of the memory
372 location rather than a copy of the address, but not if the value is
373 of mode BLKmode. Don't modify X in place since it might be
374 shared. */
10f307d9 375 if (code == MEM && GET_MODE (x) != BLKmode
376 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
377 {
e4e86ec5 378 rtx y = XEXP (x, 0);
379 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
2f6a905f 380
10f307d9 381 if (QUEUED_INSN (y))
382 {
e4e86ec5 383 rtx temp = gen_reg_rtx (GET_MODE (x));
384
2f6a905f 385 emit_insn_before (gen_move_insn (temp, new),
10f307d9 386 QUEUED_INSN (y));
387 return temp;
388 }
e4e86ec5 389
f9636a66 390 /* Copy the address into a pseudo, so that the returned value
391 remains correct across calls to emit_queue. */
e4e86ec5 392 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
10f307d9 393 }
e4e86ec5 394
10f307d9 395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
c0377bb2 398 {
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
401 {
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
404 }
405 }
10f307d9 406 else if (code == PLUS || code == MULT)
407 {
c0377bb2 408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
411 {
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
415 }
10f307d9 416 }
417 return x;
418 }
f9636a66 419 /* If the increment has not happened, use the variable itself. Copy it
420 into a new pseudo so that the value remains correct across calls to
421 emit_queue. */
10f307d9 422 if (QUEUED_INSN (x) == 0)
f9636a66 423 return copy_to_reg (QUEUED_VAR (x));
10f307d9 424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
434}
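
/* Illustration (not from the original source): a sketch of how an
   expander handling VAR++ might use the queue, assuming VAR is a
   register rtx:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...
     rtx val = protect_from_queue (q, 0);
     ...
     emit_queue ();

   VAL names the pre-increment value whether or not the queued add has
   been emitted yet; the real expander in this file is
   expand_increment, declared above.  */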

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Retrieve a mark on the queue.  */

static rtx
mark_queue (void)
{
  return pending_chain;
}

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

static void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
a7cc195f 636 /* Handle pointer conversion. */ /* SPEE 900220. */
637 /* Targets are expected to provide conversion insns between PxImode and
638 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
639 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
640 {
641 enum machine_mode full_mode
642 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
643
644 if (trunc_optab->handlers[to_mode][full_mode].insn_code
645 == CODE_FOR_nothing)
646 abort ();
647
648 if (full_mode != from_mode)
649 from = convert_to_mode (full_mode, from, unsignedp);
650 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
651 to, from, UNKNOWN);
652 return;
653 }
654 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
655 {
656 enum machine_mode full_mode
657 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
658
659 if (sext_optab->handlers[full_mode][from_mode].insn_code
660 == CODE_FOR_nothing)
661 abort ();
662
663 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
664 to, from, UNKNOWN);
665 if (to_mode == full_mode)
666 return;
667
aab2cf92 668 /* else proceed to integer conversions below. */
a7cc195f 669 from_mode = full_mode;
670 }
671

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
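
/* Illustration (not from the original source): a caller widening a
   QImode pseudo SRC to SImode with zero-extension would write
   something like

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   where nonzero UNSIGNEDP requests zero-extension.  This emits a
   zero_extendqisi2 insn when the target provides one and otherwise
   falls back to the intermediate-mode or shift-based strategies
   above.  */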

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
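
/* Illustration (not from the original source): narrowing a CONST_INT
   goes through gen_lowpart above, which truncates and re-canonicalizes
   the value, so something like

     convert_modes (QImode, SImode, GEN_INT (0x1ff), 1)

   is expected to produce (const_int -1): the low 8 bits are 0xff, and
   CONST_INTs are stored sign-extended from the mode's width.  */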
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
1040
fad4a30c 1041/* Generate several move instructions to copy LEN bytes from block FROM to
1042 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1043 and TO through protect_from_queue before calling.
ef7dc4b4 1044
fad4a30c 1045 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1046 used to push FROM to the stack.
ef7dc4b4 1047
9fe0e1b8 1048 ALIGN is maximum stack alignment we can assume.
10f307d9 1049
9fe0e1b8 1050 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1051 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1052 stpcpy. */
1053
1054rtx
35cb5232 1055move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1056 unsigned int align, int endp)
10f307d9 1057{
1058 struct move_by_pieces data;
ef7dc4b4 1059 rtx to_addr, from_addr = XEXP (from, 0);
02e7a332 1060 unsigned int max_size = MOVE_MAX_PIECES + 1;
53bd09ab 1061 enum machine_mode mode = VOIDmode, tmode;
1062 enum insn_code icode;
10f307d9 1063
b4ad0ea6 1064 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1065
10f307d9 1066 data.offset = 0;
10f307d9 1067 data.from_addr = from_addr;
ef7dc4b4 1068 if (to)
1069 {
1070 to_addr = XEXP (to, 0);
1071 data.to = to;
1072 data.autinc_to
1073 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1074 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1075 data.reverse
1076 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1077 }
1078 else
1079 {
1080 to_addr = NULL_RTX;
1081 data.to = NULL_RTX;
1082 data.autinc_to = 1;
1083#ifdef STACK_GROWS_DOWNWARD
1084 data.reverse = 1;
1085#else
1086 data.reverse = 0;
1087#endif
1088 }
1089 data.to_addr = to_addr;
10f307d9 1090 data.from = from;
10f307d9 1091 data.autinc_from
1092 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1093 || GET_CODE (from_addr) == POST_INC
1094 || GET_CODE (from_addr) == POST_DEC);
1095
1096 data.explicit_inc_from = 0;
1097 data.explicit_inc_to = 0;
10f307d9 1098 if (data.reverse) data.offset = len;
1099 data.len = len;
1100
1101 /* If copying requires more than two move insns,
1102 copy addresses to registers (to make displacements shorter)
1103 and use post-increment if available. */
1104 if (!(data.autinc_from && data.autinc_to)
1105 && move_by_pieces_ninsns (len, align) > 2)
1106 {
fa56dc1d 1107 /* Find the mode of the largest move... */
53bd09ab 1108 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1109 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1110 if (GET_MODE_SIZE (tmode) < max_size)
1111 mode = tmode;
1112
1113 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
10f307d9 1114 {
1115 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1116 data.autinc_from = 1;
1117 data.explicit_inc_from = -1;
1118 }
53bd09ab 1119 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
10f307d9 1120 {
1121 data.from_addr = copy_addr_to_reg (from_addr);
1122 data.autinc_from = 1;
1123 data.explicit_inc_from = 1;
1124 }
10f307d9 1125 if (!data.autinc_from && CONSTANT_P (from_addr))
1126 data.from_addr = copy_addr_to_reg (from_addr);
53bd09ab 1127 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
10f307d9 1128 {
1129 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1130 data.autinc_to = 1;
1131 data.explicit_inc_to = -1;
1132 }
53bd09ab 1133 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
10f307d9 1134 {
1135 data.to_addr = copy_addr_to_reg (to_addr);
1136 data.autinc_to = 1;
1137 data.explicit_inc_to = 1;
1138 }
10f307d9 1139 if (!data.autinc_to && CONSTANT_P (to_addr))
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 }
1142
9439ebf7 1143 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
325d1c45 1144 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1145 align = MOVE_MAX * BITS_PER_UNIT;
10f307d9 1146
1147 /* First move what we can in the largest integer mode, then go to
1148 successively smaller modes. */
1149
1150 while (max_size > 1)
1151 {
01ab6370 1152 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1153 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1154 if (GET_MODE_SIZE (tmode) < max_size)
10f307d9 1155 mode = tmode;
1156
1157 if (mode == VOIDmode)
1158 break;
1159
1160 icode = mov_optab->handlers[(int) mode].insn_code;
325d1c45 1161 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
10f307d9 1162 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1163
1164 max_size = GET_MODE_SIZE (mode);
1165 }
1166
1167 /* The code above should have handled everything. */
f9675788 1168 if (data.len > 0)
10f307d9 1169 abort ();
9fe0e1b8 1170
1171 if (endp)
1172 {
1173 rtx to1;
1174
1175 if (data.reverse)
1176 abort ();
1177 if (data.autinc_to)
1178 {
1179 if (endp == 2)
1180 {
1181 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1182 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1183 else
1184 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1185 -1));
1186 }
1187 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1188 data.offset);
1189 }
1190 else
1191 {
1192 if (endp == 2)
1193 --data.offset;
1194 to1 = adjust_address (data.to, QImode, data.offset);
1195 }
1196 return to1;
1197 }
1198 else
1199 return data.to;
10f307d9 1200}
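
/* Illustration (not from the original source): a builtin expander
   that wants mempcpy semantics could write

     rtx end = move_by_pieces (dest_mem, src_mem, 13, align, 1);

   and receive a QImode MEM just past the last byte copied, while
   ENDP == 2 would address the last byte itself (stpcpy) and
   ENDP == 0 would simply return DEST_MEM.  DEST_MEM, SRC_MEM and
   ALIGN here are hypothetical placeholders.  */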

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
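
/* Worked example (not from the original source): with L == 7 on a
   32-bit target where MOVE_MAX is 4 and the operands are word
   aligned, the loop counts 7/4 = 1 SImode move (3 bytes left), then
   3/2 = 1 HImode move (1 byte left), then 1 QImode move, for a total
   of 3 insns.  MOVE_BY_PIECES_P compares that count against
   MOVE_RATIO.  */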

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
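
/* Illustration (not from the original source): expanding a structure
   assignment might emit

     emit_block_move (dest_mem, src_mem, GEN_INT (size),
                      BLOCK_OP_NORMAL);

   letting the dispatch above pick by-pieces moves for small constant
   sizes, a movstr pattern when the target has one, and a memcpy
   libcall otherwise.  BLOCK_OP_CALL_PARM is reserved for copies that
   set up outgoing arguments, where a libcall could clobber the very
   stack slots being filled.  DEST_MEM, SRC_MEM and SIZE are
   hypothetical placeholders.  */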

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
0378dbdc 1634
1635/* A subroutine of emit_block_move. Copy the data via an explicit
1636 loop. This is used only when libcalls are forbidden. */
1637/* ??? It'd be nice to copy in hunks larger than QImode. */
1638
1639static void
35cb5232 1640emit_block_move_via_loop (rtx x, rtx y, rtx size,
1641 unsigned int align ATTRIBUTE_UNUSED)
0378dbdc 1642{
1643 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1644 enum machine_mode iter_mode;
1645
1646 iter_mode = GET_MODE (size);
1647 if (iter_mode == VOIDmode)
1648 iter_mode = word_mode;
1649
1650 top_label = gen_label_rtx ();
1651 cmp_label = gen_label_rtx ();
1652 iter = gen_reg_rtx (iter_mode);
1653
1654 emit_move_insn (iter, const0_rtx);
1655
1656 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1657 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1658 do_pending_stack_adjust ();
1659
0378dbdc 1660 emit_jump (cmp_label);
1661 emit_label (top_label);
1662
1663 tmp = convert_modes (Pmode, iter_mode, iter, true);
1664 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1665 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1666 x = change_address (x, QImode, x_addr);
1667 y = change_address (y, QImode, y_addr);
1668
1669 emit_move_insn (x, y);
1670
1671 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1672 true, OPTAB_LIB_WIDEN);
1673 if (tmp != iter)
1674 emit_move_insn (iter, tmp);
1675
0378dbdc 1676 emit_label (cmp_label);
1677
1678 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1679 true, top_label);
0378dbdc 1680}
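
/* A sketch in C of the loop emitted above, where each statement
   stands for the corresponding RTL and X and Y are treated as byte
   arrays:

	iter = 0;
	goto cmp;
      top:
	((char *) x)[iter] = ((char *) y)[iter];
	iter += 1;
      cmp:
	if (iter < size)
	  goto top;
*/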
10f307d9 1681\f
1682/* Copy all or part of a value X into registers starting at REGNO.
1683 The number of registers to be filled is NREGS. */
1684
1685void
35cb5232 1686move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
10f307d9 1687{
1688 int i;
0c22b90f 1689#ifdef HAVE_load_multiple
fa56dc1d 1690 rtx pat;
0c22b90f 1691 rtx last;
1692#endif
10f307d9 1693
c9750f6d 1694 if (nregs == 0)
1695 return;
1696
10f307d9 1697 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1698 x = validize_mem (force_const_mem (mode, x));
1699
1700 /* See if the machine can do this with a load multiple insn. */
1701#ifdef HAVE_load_multiple
d3afc10f 1702 if (HAVE_load_multiple)
10f307d9 1703 {
d3afc10f 1704 last = get_last_insn ();
941522d6 1705 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
d3afc10f 1706 GEN_INT (nregs));
1707 if (pat)
1708 {
1709 emit_insn (pat);
1710 return;
1711 }
1712 else
1713 delete_insns_since (last);
10f307d9 1714 }
10f307d9 1715#endif
1716
1717 for (i = 0; i < nregs; i++)
941522d6 1718 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
10f307d9 1719 operand_subword_force (x, i, mode));
1720}
1721
1722/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
530178a9 1723 The number of registers to be filled is NREGS. */
db7bca86 1724
10f307d9 1725void
35cb5232 1726move_block_from_reg (int regno, rtx x, int nregs)
10f307d9 1727{
1728 int i;
10f307d9 1729
cc119c14 1730 if (nregs == 0)
1731 return;
1732
10f307d9 1733 /* See if the machine can do this with a store multiple insn. */
1734#ifdef HAVE_store_multiple
d3afc10f 1735 if (HAVE_store_multiple)
10f307d9 1736 {
530178a9 1737 rtx last = get_last_insn ();
1738 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1739 GEN_INT (nregs));
d3afc10f 1740 if (pat)
1741 {
1742 emit_insn (pat);
1743 return;
1744 }
1745 else
1746 delete_insns_since (last);
10f307d9 1747 }
10f307d9 1748#endif
1749
1750 for (i = 0; i < nregs; i++)
1751 {
1752 rtx tem = operand_subword (x, i, 1, BLKmode);
1753
1754 if (tem == 0)
1755 abort ();
1756
941522d6 1757 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
10f307d9 1758 }
1759}
1760
b566e2e5 1761/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1762 ORIG, where ORIG is a non-consecutive group of registers represented by
1763 a PARALLEL. The clone is identical to the original except in that the
1764 original set of registers is replaced by a new set of pseudo registers.
1765 The new set has the same modes as the original set. */
1766
1767rtx
35cb5232 1768gen_group_rtx (rtx orig)
b566e2e5 1769{
1770 int i, length;
1771 rtx *tmps;
1772
1773 if (GET_CODE (orig) != PARALLEL)
1774 abort ();
1775
1776 length = XVECLEN (orig, 0);
f0af5a88 1777 tmps = alloca (sizeof (rtx) * length);
b566e2e5 1778
1779 /* Skip a NULL entry in first slot. */
1780 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1781
1782 if (i)
1783 tmps[0] = 0;
1784
1785 for (; i < length; i++)
1786 {
1787 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1788 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1789
1790 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1791 }
1792
1793 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1794}
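
/* For illustration, ORIG might be a DImode value spread across two
   SImode registers:

	(parallel:DI [(expr_list (reg:SI 3) (const_int 0))
		      (expr_list (reg:SI 4) (const_int 4))])

   The clone returned above keeps the modes and byte offsets but
   substitutes fresh pseudos for regs 3 and 4.  */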
1795
5f4cd670 1796/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1797 where DST is non-consecutive registers represented by a PARALLEL.
1798 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1d5ca076 1799 if not known. */
ce739127 1800
1801void
5f4cd670 1802emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
ce739127 1803{
6ede8018 1804 rtx *tmps, src;
1805 int start, i;
ce739127 1806
6ede8018 1807 if (GET_CODE (dst) != PARALLEL)
ce739127 1808 abort ();
1809
1810 /* Check for a NULL entry, used to indicate that the parameter goes
1811 both on the stack and in registers. */
6ede8018 1812 if (XEXP (XVECEXP (dst, 0, 0), 0))
1813 start = 0;
ce739127 1814 else
6ede8018 1815 start = 1;
1816
f0af5a88 1817 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
6ede8018 1818
6ede8018 1819 /* Process the pieces. */
1820 for (i = start; i < XVECLEN (dst, 0); i++)
1821 {
1822 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
02e7a332 1823 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1824 unsigned int bytelen = GET_MODE_SIZE (mode);
6ede8018 1825 int shift = 0;
1826
1827 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 1828 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
6ede8018 1829 {
5f4cd670 1830 /* Arrange to shift the fragment to where it belongs.
1831 extract_bit_field loads to the lsb of the reg. */
1832 if (
1833#ifdef BLOCK_REG_PADDING
1834 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1835 == (BYTES_BIG_ENDIAN ? upward : downward)
1836#else
1837 BYTES_BIG_ENDIAN
1838#endif
1839 )
1840 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
6ede8018 1841 bytelen = ssize - bytepos;
1842 if (bytelen <= 0)
fe352cf1 1843 abort ();
6ede8018 1844 }
1845
c037cba7 1846 /* If we won't be loading directly from memory, protect the real source
1847 from strange tricks we might play; but make sure that the source can
1848 be loaded directly into the destination. */
1849 src = orig_src;
e16ceb8e 1850 if (!MEM_P (orig_src)
c037cba7 1851 && (!CONSTANT_P (orig_src)
1852 || (GET_MODE (orig_src) != mode
1853 && GET_MODE (orig_src) != VOIDmode)))
1854 {
1855 if (GET_MODE (orig_src) == VOIDmode)
1856 src = gen_reg_rtx (mode);
1857 else
1858 src = gen_reg_rtx (GET_MODE (orig_src));
2c269e73 1859
c037cba7 1860 emit_move_insn (src, orig_src);
1861 }
1862
6ede8018 1863 /* Optimize the access just a bit. */
e16ceb8e 1864 if (MEM_P (src)
5f4cd670 1865 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1866 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
fe352cf1 1867 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
6ede8018 1868 && bytelen == GET_MODE_SIZE (mode))
1869 {
1870 tmps[i] = gen_reg_rtx (mode);
e513d163 1871 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
ce739127 1872 }
a1000ec6 1873 else if (GET_CODE (src) == CONCAT)
1874 {
2a075f91 1875 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1876 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1877
1878 if ((bytepos == 0 && bytelen == slen0)
1879 || (bytepos != 0 && bytepos + bytelen <= slen))
4c183732 1880 {
2a075f91 1881 /* The following assumes that the concatenated objects all
1882 have the same size. In this case, a simple calculation
1883 can be used to determine the object and the bit field
1884 to be extracted. */
1885 tmps[i] = XEXP (src, bytepos / slen0);
4c183732 1886 if (! CONSTANT_P (tmps[i])
8ad4c111 1887 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
4c183732 1888 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2a075f91 1889 (bytepos % slen0) * BITS_PER_UNIT,
1890 1, NULL_RTX, mode, mode, ssize);
4c183732 1891 }
10d075b5 1892 else if (bytepos == 0)
1893 {
2a075f91 1894 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
10d075b5 1895 emit_move_insn (mem, src);
2c269e73 1896 tmps[i] = adjust_address (mem, mode, 0);
10d075b5 1897 }
a1000ec6 1898 else
1899 abort ();
1900 }
c050f95a 1901 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1902 SIMD register, which is currently broken. While we get GCC
1903 to emit proper RTL for these cases, let's dump to memory. */
1904 else if (VECTOR_MODE_P (GET_MODE (dst))
8ad4c111 1905 && REG_P (src))
c050f95a 1906 {
1907 int slen = GET_MODE_SIZE (GET_MODE (src));
1908 rtx mem;
1909
1910 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1911 emit_move_insn (mem, src);
1912 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1913 }
568b64fd 1914 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1915 && XVECLEN (dst, 0) > 1)
1916 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
c037cba7 1917 else if (CONSTANT_P (src)
8ad4c111 1918 || (REG_P (src) && GET_MODE (src) == mode))
73645c13 1919 tmps[i] = src;
ce739127 1920 else
325d1c45 1921 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1922 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2c269e73 1923 mode, mode, ssize);
ce739127 1924
5f4cd670 1925 if (shift)
92966f8b 1926 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1927 build_int_2 (shift, 0), tmps[i], 0);
ce739127 1928 }
325d1c45 1929
fa56dc1d 1930 emit_queue ();
6ede8018 1931
1932 /* Copy the extracted pieces into the proper (probable) hard regs. */
1933 for (i = start; i < XVECLEN (dst, 0); i++)
1934 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
ce739127 1935}
1936
b566e2e5 1937/* Emit code to move a block SRC to block DST, where SRC and DST are
1938 non-consecutive groups of registers, each represented by a PARALLEL. */
1939
1940void
35cb5232 1941emit_group_move (rtx dst, rtx src)
b566e2e5 1942{
1943 int i;
1944
1945 if (GET_CODE (src) != PARALLEL
1946 || GET_CODE (dst) != PARALLEL
1947 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1948 abort ();
1949
1950 /* Skip first entry if NULL. */
1951 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1952 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1953 XEXP (XVECEXP (src, 0, i), 0));
1954}
1955
5f4cd670 1956/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1957 where SRC is non-consecutive registers represented by a PARALLEL.
1958 SSIZE represents the total size of block ORIG_DST, or -1 if not
1959 known. */
ce739127 1960
1961void
5f4cd670 1962emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
ce739127 1963{
6ede8018 1964 rtx *tmps, dst;
1965 int start, i;
ce739127 1966
6ede8018 1967 if (GET_CODE (src) != PARALLEL)
ce739127 1968 abort ();
1969
1970 /* Check for a NULL entry, used to indicate that the parameter goes
1971 both on the stack and in registers. */
6ede8018 1972 if (XEXP (XVECEXP (src, 0, 0), 0))
1973 start = 0;
ce739127 1974 else
6ede8018 1975 start = 1;
1976
f0af5a88 1977 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
ce739127 1978
6ede8018 1979 /* Copy the (probable) hard regs into pseudos. */
1980 for (i = start; i < XVECLEN (src, 0); i++)
ce739127 1981 {
6ede8018 1982 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1983 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1984 emit_move_insn (tmps[i], reg);
1985 }
fa56dc1d 1986 emit_queue ();
ce739127 1987
6ede8018 1988 /* If we won't be storing directly into memory, protect the real destination
1989 from strange tricks we might play. */
1990 dst = orig_dst;
723d3639 1991 if (GET_CODE (dst) == PARALLEL)
1992 {
1993 rtx temp;
1994
1995 /* We can get a PARALLEL dst if there is a conditional expression in
1996 a return statement. In that case, the dst and src are the same,
1997 so no action is necessary. */
1998 if (rtx_equal_p (dst, src))
1999 return;
2000
2001 /* It is unclear if we can ever reach here, but we may as well handle
2002 it. Allocate a temporary, and split this into a store/load to/from
2003 the temporary. */
2004
2005 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
5f4cd670 2006 emit_group_store (temp, src, type, ssize);
2007 emit_group_load (dst, temp, type, ssize);
723d3639 2008 return;
2009 }
e16ceb8e 2010 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
6ede8018 2011 {
2012 dst = gen_reg_rtx (GET_MODE (orig_dst));
2013 /* Make life a bit easier for combine. */
286dba1e 2014 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
6ede8018 2015 }
6ede8018 2016
2017 /* Process the pieces. */
2018 for (i = start; i < XVECLEN (src, 0); i++)
2019 {
02e7a332 2020 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
6ede8018 2021 enum machine_mode mode = GET_MODE (tmps[i]);
02e7a332 2022 unsigned int bytelen = GET_MODE_SIZE (mode);
463e3bf7 2023 rtx dest = dst;
6ede8018 2024
2025 /* Handle trailing fragments that run over the size of the struct. */
e1439bcb 2026 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
fe08fc1b 2027 {
5f4cd670 2028 /* store_bit_field always takes its value from the lsb.
2029 Move the fragment to the lsb if it's not already there. */
2030 if (
2031#ifdef BLOCK_REG_PADDING
2032 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2033 == (BYTES_BIG_ENDIAN ? upward : downward)
2034#else
2035 BYTES_BIG_ENDIAN
2036#endif
2037 )
6ede8018 2038 {
2039 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
92966f8b 2040 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2041 build_int_2 (shift, 0), tmps[i], 0);
6ede8018 2042 }
2043 bytelen = ssize - bytepos;
fe08fc1b 2044 }
ce739127 2045
463e3bf7 2046 if (GET_CODE (dst) == CONCAT)
2047 {
2048 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2049 dest = XEXP (dst, 0);
2050 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2051 {
2052 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2053 dest = XEXP (dst, 1);
2054 }
376c21d1 2055 else if (bytepos == 0 && XVECLEN (src, 0))
2056 {
2057 dest = assign_stack_temp (GET_MODE (dest),
2058 GET_MODE_SIZE (GET_MODE (dest)), 0);
2059 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2060 tmps[i]);
2061 dst = dest;
2062 break;
2063 }
463e3bf7 2064 else
2065 abort ();
2066 }
2067
6ede8018 2068 /* Optimize the access just a bit. */
e16ceb8e 2069 if (MEM_P (dest)
5f4cd670 2070 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
fe352cf1 2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
6ede8018 2073 && bytelen == GET_MODE_SIZE (mode))
463e3bf7 2074 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
6ede8018 2075 else
463e3bf7 2076 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2c269e73 2077 mode, tmps[i], ssize);
ce739127 2078 }
fe352cf1 2079
fa56dc1d 2080 emit_queue ();
6ede8018 2081
2082 /* Copy from the pseudo into the (probable) hard reg. */
376c21d1 2083 if (orig_dst != dst)
6ede8018 2084 emit_move_insn (orig_dst, dst);
ce739127 2085}
2086
25eb0f59 2087/* Generate code to copy a BLKmode object of TYPE out of a
2088 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2089 is null, a stack temporary is created. TGTBLK is returned.
2090
2c8ff1ed 2091 The purpose of this routine is to handle functions that return
2092 BLKmode structures in registers. Some machines (the PA for example)
2093 want to return all small structures in registers regardless of the
2094 structure's alignment. */
25eb0f59 2095
2096rtx
35cb5232 2097copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
25eb0f59 2098{
325d1c45 2099 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2c8ff1ed 2102 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
325d1c45 2103
2104 if (tgtblk == 0)
2105 {
387bc205 2106 tgtblk = assign_temp (build_qualified_type (type,
2107 (TYPE_QUALS (type)
2108 | TYPE_QUAL_CONST)),
2109 0, 1, 1);
325d1c45 2110 preserve_temp_slots (tgtblk);
2111 }
fa56dc1d 2112
a689a61a 2113 /* This code assumes srcreg is at least a full word. If it isn't, copy it
f4a0a478 2114 into a new pseudo which is a full word. */
23551094 2115
325d1c45 2116 if (GET_MODE (srcreg) != BLKmode
2117 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
78a8ed03 2118 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
325d1c45 2119
2c8ff1ed 2120 /* If the structure doesn't take up a whole number of words, see whether
2121 SRCREG is padded on the left or on the right. If it's on the left,
2122 set PADDING_CORRECTION to the number of bits to skip.
2123
2124 In most ABIs, the structure will be returned at the least significant end of
2125 the register, which translates to right padding on little-endian
2126 targets and left padding on big-endian targets. The opposite
2127 holds if the structure is returned at the most significant
2128 end of the register. */
2129 if (bytes % UNITS_PER_WORD != 0
2130 && (targetm.calls.return_in_msb (type)
2131 ? !BYTES_BIG_ENDIAN
2132 : BYTES_BIG_ENDIAN))
2133 padding_correction
325d1c45 2134 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2135
2136 /* Copy the structure BITSIZE bits at a time.
fa56dc1d 2137
325d1c45 2138 We could probably emit more efficient code for machines which do not use
2139 strict alignment, but it doesn't seem worth the effort at the current
2140 time. */
2c8ff1ed 2141 for (bitpos = 0, xbitpos = padding_correction;
325d1c45 2142 bitpos < bytes * BITS_PER_UNIT;
2143 bitpos += bitsize, xbitpos += bitsize)
2144 {
fa56dc1d 2145 /* We need a new source operand each time xbitpos is on a
2c8ff1ed 2146 word boundary and when xbitpos == padding_correction
325d1c45 2147 (the first time through). */
2148 if (xbitpos % BITS_PER_WORD == 0
2c8ff1ed 2149 || xbitpos == padding_correction)
c502077e 2150 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2151 GET_MODE (srcreg));
325d1c45 2152
2153 /* We need a new destination operand each time bitpos is on
2154 a word boundary. */
2155 if (bitpos % BITS_PER_WORD == 0)
2156 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
fa56dc1d 2157
325d1c45 2158 /* Use xbitpos for the source extraction (right justified) and
2159 bitpos for the destination store (left justified). */
2160 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2161 extract_bit_field (src, bitsize,
2162 xbitpos % BITS_PER_WORD, 1,
2163 NULL_RTX, word_mode, word_mode,
2c269e73 2164 BITS_PER_WORD),
2165 BITS_PER_WORD);
325d1c45 2166 }
2167
2168 return tgtblk;
25eb0f59 2169}
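
/* A worked example of the padding logic above: on a 32-bit
   big-endian target (BITS_PER_WORD == 32) returning a 6-byte
   structure, bytes % UNITS_PER_WORD == 2, so padding_correction is
   32 - 2 * 8 == 16; the copy loop therefore starts extracting 16
   bits into the first word of SRCREG, skipping the left padding.  */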
2170
07409b3a 2171/* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
10f307d9 2173
2174void
35cb5232 2175use_reg (rtx *call_fusage, rtx reg)
7e2ca70b 2176{
8ad4c111 2177 if (!REG_P (reg)
f2799de7 2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
fa56dc1d 2179 abort ();
7e2ca70b 2180
2181 *call_fusage
941522d6 2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
7e2ca70b 2184}
2185
07409b3a 2186/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
7e2ca70b 2188
2189void
35cb5232 2190use_regs (rtx *call_fusage, int regno, int nregs)
10f307d9 2191{
f2799de7 2192 int i;
10f307d9 2193
f2799de7 2194 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2195 abort ();
2196
2197 for (i = 0; i < nregs; i++)
936082bb 2198 use_reg (call_fusage, regno_reg_rtx[regno + i]);
10f307d9 2199}
ce739127 2200
2201/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2202 PARALLEL REGS. This is for calls that pass values in multiple
2203 non-contiguous locations. The Irix 6 ABI has examples of this. */
2204
2205void
35cb5232 2206use_group_regs (rtx *call_fusage, rtx regs)
ce739127 2207{
2208 int i;
2209
2f373e5d 2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 {
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
ce739127 2213
2f373e5d 2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
8ad4c111 2217 if (reg != 0 && REG_P (reg))
2f373e5d 2218 use_reg (call_fusage, reg);
2219 }
ce739127 2220}
10f307d9 2221\f
6840589f 2222
d1f6ae0c 2223/* Determine whether the LEN bytes generated by CONSTFUN can be
2224 stored to memory using several move instructions. CONSTFUNDATA is
2225 a pointer which will be passed as argument in every CONSTFUN call.
2226 ALIGN is maximum alignment we can assume. Return nonzero if a
2227 call to store_by_pieces should succeed. */
2228
6840589f 2229int
35cb5232 2230can_store_by_pieces (unsigned HOST_WIDE_INT len,
2231 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2232 void *constfundata, unsigned int align)
6840589f 2233{
9acfe138 2234 unsigned HOST_WIDE_INT max_size, l;
6840589f 2235 HOST_WIDE_INT offset = 0;
2236 enum machine_mode mode, tmode;
2237 enum insn_code icode;
2238 int reverse;
2239 rtx cst;
2240
1d881c02 2241 if (len == 0)
2242 return 1;
2243
805e22b2 2244 if (! STORE_BY_PIECES_P (len, align))
6840589f 2245 return 0;
2246
2247 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2248 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2249 align = MOVE_MAX * BITS_PER_UNIT;
2250
2251 /* We would first store what we can in the largest integer mode, then go to
2252 successively smaller modes. */
2253
2254 for (reverse = 0;
2255 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2256 reverse++)
2257 {
2258 l = len;
2259 mode = VOIDmode;
d1f6ae0c 2260 max_size = STORE_MAX_PIECES + 1;
6840589f 2261 while (max_size > 1)
2262 {
2263 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2265 if (GET_MODE_SIZE (tmode) < max_size)
2266 mode = tmode;
2267
2268 if (mode == VOIDmode)
2269 break;
2270
2271 icode = mov_optab->handlers[(int) mode].insn_code;
2272 if (icode != CODE_FOR_nothing
2273 && align >= GET_MODE_ALIGNMENT (mode))
2274 {
2275 unsigned int size = GET_MODE_SIZE (mode);
2276
2277 while (l >= size)
2278 {
2279 if (reverse)
2280 offset -= size;
2281
2282 cst = (*constfun) (constfundata, offset, mode);
2283 if (!LEGITIMATE_CONSTANT_P (cst))
2284 return 0;
2285
2286 if (!reverse)
2287 offset += size;
2288
2289 l -= size;
2290 }
2291 }
2292
2293 max_size = GET_MODE_SIZE (mode);
2294 }
2295
2296 /* The code above should have handled everything. */
2297 if (l != 0)
2298 abort ();
2299 }
2300
2301 return 1;
2302}
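
/* Example of the mode walk above, assuming a 32-bit target where
   SImode, HImode and QImode moves are all available at sufficient
   alignment: for LEN == 7 the function checks one SImode store at
   offset 0, one HImode store at offset 4 and one QImode store at
   offset 6, requiring LEGITIMATE_CONSTANT_P for each value that
   CONSTFUN returns.  */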
2303
2304/* Generate several move instructions to store LEN bytes generated by
2305 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2306 pointer which will be passed as argument in every CONSTFUN call.
9fe0e1b8 2307 ALIGN is maximum alignment we can assume.
2308 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2309 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2310 stpcpy. */
6840589f 2311
9fe0e1b8 2312rtx
35cb5232 2313store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2314 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2315 void *constfundata, unsigned int align, int endp)
6840589f 2316{
2317 struct store_by_pieces data;
2318
1d881c02 2319 if (len == 0)
2320 {
2321 if (endp == 2)
2322 abort ();
2323 return to;
2324 }
2325
805e22b2 2326 if (! STORE_BY_PIECES_P (len, align))
6840589f 2327 abort ();
2328 to = protect_from_queue (to, 1);
2329 data.constfun = constfun;
2330 data.constfundata = constfundata;
2331 data.len = len;
2332 data.to = to;
2333 store_by_pieces_1 (&data, align);
9fe0e1b8 2334 if (endp)
2335 {
2336 rtx to1;
2337
2338 if (data.reverse)
2339 abort ();
2340 if (data.autinc_to)
2341 {
2342 if (endp == 2)
2343 {
2344 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2345 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2346 else
2347 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2348 -1));
2349 }
2350 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2351 data.offset);
2352 }
2353 else
2354 {
2355 if (endp == 2)
2356 --data.offset;
2357 to1 = adjust_address (data.to, QImode, data.offset);
2358 }
2359 return to1;
2360 }
2361 else
2362 return data.to;
6840589f 2363}
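
/* A minimal sketch of a CONSTFUN callback, here one that would make
   store_by_pieces fill the block with zeros (clear_by_pieces_1 below
   is exactly this):

	static rtx
	zero_byte_fn (void *data ATTRIBUTE_UNUSED,
		      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
	{
	  return const0_rtx;
	}

   The constant-string readers used by the memcpy/strcpy builtins
   live in builtins.c.  */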
2364
325d1c45 2365/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2366 rtx with BLKmode). The caller must pass TO through protect_from_queue
2367 before calling. ALIGN is maximum alignment we can assume. */
dbd14dc5 2368
2369static void
f1667d92 2370clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
dbd14dc5 2371{
6840589f 2372 struct store_by_pieces data;
2373
1d881c02 2374 if (len == 0)
2375 return;
2376
6840589f 2377 data.constfun = clear_by_pieces_1;
2571646d 2378 data.constfundata = NULL;
6840589f 2379 data.len = len;
2380 data.to = to;
2381 store_by_pieces_1 (&data, align);
2382}
2383
2384/* Callback routine for clear_by_pieces.
2385 Return const0_rtx unconditionally. */
2386
2387static rtx
35cb5232 2388clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2389 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2390 enum machine_mode mode ATTRIBUTE_UNUSED)
6840589f 2391{
2392 return const0_rtx;
2393}
2394
2395/* Subroutine of clear_by_pieces and store_by_pieces.
2396 Generate several move instructions to store LEN bytes of block TO. (A MEM
2397 rtx with BLKmode). The caller must pass TO through protect_from_queue
2398 before calling. ALIGN is maximum alignment we can assume. */
2399
2400static void
35cb5232 2401store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2402 unsigned int align ATTRIBUTE_UNUSED)
6840589f 2403{
2404 rtx to_addr = XEXP (data->to, 0);
d1f6ae0c 2405 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
53bd09ab 2406 enum machine_mode mode = VOIDmode, tmode;
2407 enum insn_code icode;
dbd14dc5 2408
6840589f 2409 data->offset = 0;
2410 data->to_addr = to_addr;
2411 data->autinc_to
dbd14dc5 2412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2414
6840589f 2415 data->explicit_inc_to = 0;
2416 data->reverse
dbd14dc5 2417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
6840589f 2418 if (data->reverse)
2419 data->offset = data->len;
dbd14dc5 2420
6840589f 2421 /* If storing requires more than two move insns,
dbd14dc5 2422 copy addresses to registers (to make displacements shorter)
2423 and use post-increment if available. */
6840589f 2424 if (!data->autinc_to
2425 && move_by_pieces_ninsns (data->len, align) > 2)
dbd14dc5 2426 {
fa56dc1d 2427 /* Determine the main mode we'll be using. */
53bd09ab 2428 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2429 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2430 if (GET_MODE_SIZE (tmode) < max_size)
2431 mode = tmode;
2432
6840589f 2433 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
dbd14dc5 2434 {
6840589f 2435 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2436 data->autinc_to = 1;
2437 data->explicit_inc_to = -1;
dbd14dc5 2438 }
f7c44134 2439
6840589f 2440 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2441 && ! data->autinc_to)
dbd14dc5 2442 {
6840589f 2443 data->to_addr = copy_addr_to_reg (to_addr);
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = 1;
dbd14dc5 2446 }
f7c44134 2447
6840589f 2448 if (!data->autinc_to && CONSTANT_P (to_addr))
2449 data->to_addr = copy_addr_to_reg (to_addr);
dbd14dc5 2450 }
2451
9439ebf7 2452 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
325d1c45 2453 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
80909c64 2454 align = MOVE_MAX * BITS_PER_UNIT;
dbd14dc5 2455
6840589f 2456 /* First store what we can in the largest integer mode, then go to
dbd14dc5 2457 successively smaller modes. */
2458
2459 while (max_size > 1)
2460 {
dbd14dc5 2461 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2462 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2463 if (GET_MODE_SIZE (tmode) < max_size)
2464 mode = tmode;
2465
2466 if (mode == VOIDmode)
2467 break;
2468
2469 icode = mov_optab->handlers[(int) mode].insn_code;
325d1c45 2470 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
6840589f 2471 store_by_pieces_2 (GEN_FCN (icode), mode, data);
dbd14dc5 2472
2473 max_size = GET_MODE_SIZE (mode);
2474 }
2475
2476 /* The code above should have handled everything. */
6840589f 2477 if (data->len != 0)
dbd14dc5 2478 abort ();
2479}
2480
6840589f 2481/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
dbd14dc5 2482 with move instructions for mode MODE. GENFUN is the gen_... function
2483 to make a move insn for that mode. DATA has all the other info. */
2484
2485static void
35cb5232 2486store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2487 struct store_by_pieces *data)
dbd14dc5 2488{
f7c44134 2489 unsigned int size = GET_MODE_SIZE (mode);
6840589f 2490 rtx to1, cst;
dbd14dc5 2491
2492 while (data->len >= size)
2493 {
f7c44134 2494 if (data->reverse)
2495 data->offset -= size;
dbd14dc5 2496
f7c44134 2497 if (data->autinc_to)
bf42c62d 2498 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2499 data->offset);
fa56dc1d 2500 else
e513d163 2501 to1 = adjust_address (data->to, mode, data->offset);
dbd14dc5 2502
e4e498cf 2503 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
6840589f 2504 emit_insn (gen_add2_insn (data->to_addr,
2505 GEN_INT (-(HOST_WIDE_INT) size)));
dbd14dc5 2506
6840589f 2507 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2508 emit_insn ((*genfun) (to1, cst));
f7c44134 2509
e4e498cf 2510 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
dbd14dc5 2511 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
dbd14dc5 2512
f7c44134 2513 if (! data->reverse)
2514 data->offset += size;
dbd14dc5 2515
2516 data->len -= size;
2517 }
2518}
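
/* With HAVE_POST_INCREMENT and data->explicit_inc_to > 0, each
   iteration of the loop above emits, roughly:

	(set (mem:M (reg to_addr)) (constant from CONSTFUN))
	(set (reg to_addr) (plus (reg to_addr) (const_int size)))

   walking forward through the block; the HAVE_PRE_DECREMENT case
   mirrors this, adjusting TO_ADDR before each store and walking
   backward.  */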
2519\f
325d1c45 2520/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2a631e19 2521 its length in bytes. */
0dbd1c74 2522
2523rtx
35cb5232 2524clear_storage (rtx object, rtx size)
10f307d9 2525{
0dbd1c74 2526 rtx retval = 0;
e16ceb8e 2527 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2a631e19 2528 : GET_MODE_ALIGNMENT (GET_MODE (object)));
0dbd1c74 2529
20c377c2 2530 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2531 just move a zero. Otherwise, do this a piece at a time. */
886cfd4f 2532 if (GET_MODE (object) != BLKmode
20c377c2 2533 && GET_CODE (size) == CONST_INT
c0bfc78e 2534 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
20c377c2 2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2536 else
10f307d9 2537 {
dbd14dc5 2538 object = protect_from_queue (object, 1);
2539 size = protect_from_queue (size, 0);
2540
9fbc9c4d 2541 if (size == const0_rtx)
1d881c02 2542 ;
2543 else if (GET_CODE (size) == CONST_INT
310d3ec9 2544 && CLEAR_BY_PIECES_P (INTVAL (size), align))
dbd14dc5 2545 clear_by_pieces (object, INTVAL (size), align);
c0bfc78e 2546 else if (clear_storage_via_clrstr (object, size, align))
2547 ;
dbd14dc5 2548 else
c0bfc78e 2549 retval = clear_storage_via_libcall (object, size);
2550 }
2551
2552 return retval;
2553}
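
/* Typical use is clear_storage (object, GEN_INT (bytes)) on a
   BLKmode MEM; the return value is nonzero only when the memset
   libcall path was taken and its result is available.  */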
2554
2555/* A subroutine of clear_storage. Expand a clrstr pattern;
2556 return true if successful. */
2557
2558static bool
35cb5232 2559clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
c0bfc78e 2560{
2561 /* Try the most limited insn first, because there's no point
2562 including more than one in the machine description unless
2563 the more limited one has some advantage. */
2564
2565 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2566 enum machine_mode mode;
2567
2568 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2569 mode = GET_MODE_WIDER_MODE (mode))
2570 {
2571 enum insn_code code = clrstr_optab[(int) mode];
2572 insn_operand_predicate_fn pred;
2573
2574 if (code != CODE_FOR_nothing
2575 /* We don't need MODE to be narrower than
2576 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2577 the mode mask, as it is returned by the macro, it will
2578 definitely be less than the actual mode mask. */
2579 && ((GET_CODE (size) == CONST_INT
2580 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2581 <= (GET_MODE_MASK (mode) >> 1)))
2582 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2583 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2584 || (*pred) (object, BLKmode))
2585 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2586 || (*pred) (opalign, VOIDmode)))
dbd14dc5 2587 {
c0bfc78e 2588 rtx op1;
2589 rtx last = get_last_insn ();
2590 rtx pat;
dbd14dc5 2591
c0bfc78e 2592 op1 = convert_to_mode (mode, size, 1);
2593 pred = insn_data[(int) code].operand[1].predicate;
2594 if (pred != 0 && ! (*pred) (op1, mode))
2595 op1 = copy_to_mode_reg (mode, op1);
dbd14dc5 2596
c0bfc78e 2597 pat = GEN_FCN ((int) code) (object, op1, opalign);
2598 if (pat)
dbd14dc5 2599 {
c0bfc78e 2600 emit_insn (pat);
2601 return true;
2602 }
2603 else
2604 delete_insns_since (last);
2605 }
2606 }
dbd14dc5 2607
c0bfc78e 2608 return false;
2609}
dbd14dc5 2610
f896c932 2611/* A subroutine of clear_storage. Expand a call to memset.
c0bfc78e 2612 Return the return value of memset, 0 otherwise. */
dbd14dc5 2613
c0bfc78e 2614static rtx
35cb5232 2615clear_storage_via_libcall (rtx object, rtx size)
c0bfc78e 2616{
2617 tree call_expr, arg_list, fn, object_tree, size_tree;
2618 enum machine_mode size_mode;
2619 rtx retval;
dbd14dc5 2620
c0bfc78e 2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
f708f8fd 2622
c0bfc78e 2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
f708f8fd 2626
c0bfc78e 2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
f708f8fd 2630
c0bfc78e 2631 To avoid this problem we go ahead and emit code to copy OBJECT
735f4358 2632 and SIZE into new pseudos.
f708f8fd 2633
c0bfc78e 2634 Note this is not strictly needed for library calls since they
2635 do not call emit_queue before loading their arguments. However,
2636 we may need to have library calls call emit_queue in the future
2637 since failing to do so could cause problems for targets which
2638 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
f708f8fd 2639
c0bfc78e 2640 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
f708f8fd 2641
f896c932 2642 size_mode = TYPE_MODE (sizetype);
c0bfc78e 2643 size = convert_to_mode (size_mode, size, 1);
2644 size = copy_to_mode_reg (size_mode, size);
f708f8fd 2645
c0bfc78e 2646 /* It is incorrect to use the libcall calling conventions to call
2647 memset in this context. This could be a user call to memset and
2648 the user may wish to examine the return value from memset. For
2649 targets where libcalls and normal calls have different conventions
f896c932 2650 for returning pointers, we could end up generating incorrect code. */
06b8e3db 2651
c0bfc78e 2652 object_tree = make_tree (ptr_type_node, object);
f896c932 2653 size_tree = make_tree (sizetype, size);
c0bfc78e 2654
2655 fn = clear_storage_libcall_fn (true);
2656 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
f896c932 2657 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
c0bfc78e 2658 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2659
2660 /* Now we have to build up the CALL_EXPR itself. */
2661 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2662 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2663 call_expr, arg_list, NULL_TREE);
c0bfc78e 2664
2665 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2666
2667 /* If we are initializing a readonly value, show the above call
2668 clobbered it. Otherwise, a load from it may erroneously be
2669 hoisted from a loop. */
2670 if (RTX_UNCHANGING_P (object))
2671 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2672
f896c932 2673 return retval;
c0bfc78e 2674}
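
/* The CALL_EXPR built above corresponds to the C source
   "memset (object, 0, size)"; the zero is passed explicitly as
   integer_zero_node to match memset's int-typed VAL argument.  */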
2675
2676/* A subroutine of clear_storage_via_libcall. Create the tree node
2677 for the function we use for block clears. The first time FOR_CALL
2678 is true, we call assemble_external. */
2679
2680static GTY(()) tree block_clear_fn;
8ca560c1 2681
d459e0d8 2682void
35cb5232 2683init_block_clear_fn (const char *asmspec)
c0bfc78e 2684{
d459e0d8 2685 if (!block_clear_fn)
c0bfc78e 2686 {
d459e0d8 2687 tree fn, args;
2688
f896c932 2689 fn = get_identifier ("memset");
2690 args = build_function_type_list (ptr_type_node, ptr_type_node,
2691 integer_type_node, sizetype,
2692 NULL_TREE);
c0bfc78e 2693
2694 fn = build_decl (FUNCTION_DECL, fn, args);
2695 DECL_EXTERNAL (fn) = 1;
2696 TREE_PUBLIC (fn) = 1;
2697 DECL_ARTIFICIAL (fn) = 1;
2698 TREE_NOTHROW (fn) = 1;
2699
2700 block_clear_fn = fn;
10f307d9 2701 }
0dbd1c74 2702
d459e0d8 2703 if (asmspec)
2704 {
2705 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2706 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2707 }
2708}
2709
2710static tree
35cb5232 2711clear_storage_libcall_fn (int for_call)
d459e0d8 2712{
2713 static bool emitted_extern;
2714
2715 if (!block_clear_fn)
2716 init_block_clear_fn (NULL);
2717
c0bfc78e 2718 if (for_call && !emitted_extern)
2719 {
2720 emitted_extern = true;
d459e0d8 2721 make_decl_rtl (block_clear_fn, NULL);
2722 assemble_external (block_clear_fn);
c0bfc78e 2723 }
10f307d9 2724
d459e0d8 2725 return block_clear_fn;
c0bfc78e 2726}
2727\f
10f307d9 2728/* Generate code to copy Y into X.
2729 Both Y and X must have the same mode, except that
2730 Y can be a constant with VOIDmode.
2731 This mode cannot be BLKmode; use emit_block_move for that.
2732
2733 Return the last instruction emitted. */
2734
2735rtx
35cb5232 2736emit_move_insn (rtx x, rtx y)
10f307d9 2737{
2738 enum machine_mode mode = GET_MODE (x);
94580317 2739 rtx y_cst = NULL_RTX;
6442675c 2740 rtx last_insn, set;
10f307d9 2741
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2744
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2746 abort ();
2747
4ee9c684 2748 if (CONSTANT_P (y))
94580317 2749 {
c0c4a46d 2750 if (optimize
248c3c28 2751 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
c0c4a46d 2752 && (last_insn = compress_float_constant (x, y)))
2753 return last_insn;
2754
6442675c 2755 y_cst = y;
2756
c0c4a46d 2757 if (!LEGITIMATE_CONSTANT_P (y))
2758 {
c0c4a46d 2759 y = force_const_mem (mode, y);
a6bbccc1 2760
2761 /* If the target's cannot_force_const_mem prevented the spill,
2762 assume that the target's move expanders will also take care
2763 of the non-legitimate constant. */
2764 if (!y)
2765 y = y_cst;
c0c4a46d 2766 }
94580317 2767 }
10f307d9 2768
2769 /* If X or Y are memory references, verify that their addresses are valid
2770 for the machine. */
e16ceb8e 2771 if (MEM_P (x)
10f307d9 2772 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2773 && ! push_operand (x, GET_MODE (x)))
2774 || (flag_force_addr
2775 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
537ffcfc 2776 x = validize_mem (x);
10f307d9 2777
e16ceb8e 2778 if (MEM_P (y)
10f307d9 2779 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2780 || (flag_force_addr
2781 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
537ffcfc 2782 y = validize_mem (y);
10f307d9 2783
2784 if (mode == BLKmode)
2785 abort ();
2786
94580317 2787 last_insn = emit_move_insn_1 (x, y);
2788
8ad4c111 2789 if (y_cst && REG_P (x)
6442675c 2790 && (set = single_set (last_insn)) != NULL_RTX
2791 && SET_DEST (set) == x
2792 && ! rtx_equal_p (y_cst, SET_SRC (set)))
c080d8f0 2793 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
94580317 2794
2795 return last_insn;
aaad03e5 2796}
2797
2798/* Low level part of emit_move_insn.
2799 Called just like emit_move_insn, but assumes X and Y
2800 are basically valid. */
2801
2802rtx
35cb5232 2803emit_move_insn_1 (rtx x, rtx y)
aaad03e5 2804{
2805 enum machine_mode mode = GET_MODE (x);
2806 enum machine_mode submode;
2807 enum mode_class class = GET_MODE_CLASS (mode);
aaad03e5 2808
0fd4500a 2809 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
fa56dc1d 2810 abort ();
1203f673 2811
10f307d9 2812 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2813 return
2814 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2815
d3938eaa 2816 /* Expand complex moves by moving real part and imag part, if possible. */
b63679d2 2817 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
e9e12845 2818 && BLKmode != (submode = GET_MODE_INNER (mode))
b63679d2 2819 && (mov_optab->handlers[(int) submode].insn_code
2820 != CODE_FOR_nothing))
2821 {
2822 /* Don't split destination if it is a stack push. */
2823 int stack = push_operand (x, GET_MODE (x));
b63679d2 2824
4ed008e7 2825#ifdef PUSH_ROUNDING
31d97509 2826 /* If we push to the stack, but the size is smaller than what the
2827 machine can push exactly, we need to use move instructions. */
a8d8b962 2828 if (stack
76ab50f8 2829 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2830 != GET_MODE_SIZE (submode)))
a8d8b962 2831 {
2832 rtx temp;
76ab50f8 2833 HOST_WIDE_INT offset1, offset2;
a8d8b962 2834
2835 /* Do not use anti_adjust_stack, since we don't want to update
2836 stack_pointer_delta. */
2837 temp = expand_binop (Pmode,
2838#ifdef STACK_GROWS_DOWNWARD
2839 sub_optab,
2840#else
2841 add_optab,
2842#endif
2843 stack_pointer_rtx,
2844 GEN_INT
76ab50f8 2845 (PUSH_ROUNDING
2846 (GET_MODE_SIZE (GET_MODE (x)))),
2847 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2848
a8d8b962 2849 if (temp != stack_pointer_rtx)
2850 emit_move_insn (stack_pointer_rtx, temp);
76ab50f8 2851
a8d8b962 2852#ifdef STACK_GROWS_DOWNWARD
2853 offset1 = 0;
2854 offset2 = GET_MODE_SIZE (submode);
2855#else
2856 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2857 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2858 + GET_MODE_SIZE (submode));
2859#endif
76ab50f8 2860
a8d8b962 2861 emit_move_insn (change_address (x, submode,
2862 gen_rtx_PLUS (Pmode,
2863 stack_pointer_rtx,
2864 GEN_INT (offset1))),
2865 gen_realpart (submode, y));
2866 emit_move_insn (change_address (x, submode,
2867 gen_rtx_PLUS (Pmode,
2868 stack_pointer_rtx,
2869 GEN_INT (offset2))),
2870 gen_imagpart (submode, y));
2871 }
e3fe8c3b 2872 else
4ed008e7 2873#endif
b63679d2 2874 /* If this is a stack, push the highpart first, so it
2875 will be in the argument order.
2876
2877 In that case, change_address is used only to convert
2878 the mode, not to change the address. */
e3fe8c3b 2879 if (stack)
90524033 2880 {
55997042 2881 /* Note that the real part always precedes the imag part in memory
2882 regardless of machine's endianness. */
90524033 2883#ifdef STACK_GROWS_DOWNWARD
9d3d90e0 2884 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_imagpart (submode, y));
2886 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y));
90524033 2888#else
9d3d90e0 2889 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2890 gen_realpart (submode, y));
2891 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2892 gen_imagpart (submode, y));
90524033 2893#endif
2894 }
2895 else
2896 {
7f964718 2897 rtx realpart_x, realpart_y;
2898 rtx imagpart_x, imagpart_y;
2899
5b5abf88 2900 /* If this is a complex value with each part being smaller than a
2901 word, the usual calling sequence will likely pack the pieces into
2902 a single register. Unfortunately, SUBREG of hard registers only
2903 deals in terms of words, so we have a problem converting input
2904 arguments to the CONCAT of two registers that is used elsewhere
2905 for complex values. If this is before reload, we can copy it into
2906 memory and reload. FIXME, we should see about using extract and
2907 insert on integer registers, but complex short and complex char
2908 variables should be rarely used. */
fa56dc1d 2909 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
5b5abf88 2910 && (reload_in_progress | reload_completed) == 0)
2911 {
76ab50f8 2912 int packed_dest_p
2913 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2914 int packed_src_p
2915 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
5b5abf88 2916
2917 if (packed_dest_p || packed_src_p)
2918 {
2919 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2920 ? MODE_FLOAT : MODE_INT);
2921
387bc205 2922 enum machine_mode reg_mode
2923 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
5b5abf88 2924
2925 if (reg_mode != BLKmode)
2926 {
2927 rtx mem = assign_stack_temp (reg_mode,
2928 GET_MODE_SIZE (mode), 0);
e513d163 2929 rtx cmem = adjust_address (mem, mode, 0);
5b5abf88 2930
5b5abf88 2931 if (packed_dest_p)
2932 {
2933 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
76ab50f8 2934
5b5abf88 2935 emit_move_insn_1 (cmem, y);
2936 return emit_move_insn_1 (sreg, mem);
2937 }
2938 else
2939 {
2940 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
76ab50f8 2941
5b5abf88 2942 emit_move_insn_1 (mem, sreg);
2943 return emit_move_insn_1 (x, cmem);
2944 }
2945 }
2946 }
2947 }
2948
7f964718 2949 realpart_x = gen_realpart (submode, x);
2950 realpart_y = gen_realpart (submode, y);
2951 imagpart_x = gen_imagpart (submode, x);
2952 imagpart_y = gen_imagpart (submode, y);
2953
2954 /* Show the output dies here. This is necessary for SUBREGs
2955 of pseudos since we cannot track their lifetimes correctly;
c6abf2b8 2956 hard regs shouldn't appear here except as return values.
2957 We never want to emit such a clobber after reload. */
2958 if (x != y
7f964718 2959 && ! (reload_in_progress || reload_completed)
2960 && (GET_CODE (realpart_x) == SUBREG
2961 || GET_CODE (imagpart_x) == SUBREG))
76ab50f8 2962 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
7908d3b3 2963
9d3d90e0 2964 emit_move_insn (realpart_x, realpart_y);
2965 emit_move_insn (imagpart_x, imagpart_y);
90524033 2966 }
b63679d2 2967
bc82d91b 2968 return get_last_insn ();
b63679d2 2969 }
2970
8d94ba7c 2971 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2972 find a mode to do it in. If we have a movcc, use it. Otherwise,
2973 find the MODE_INT mode of the same width. */
2974 else if (GET_MODE_CLASS (mode) == MODE_CC
2975 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2976 {
2977 enum insn_code insn_code;
2978 enum machine_mode tmode = VOIDmode;
2979 rtx x1 = x, y1 = y;
2980
2981 if (mode != CCmode
2982 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2983 tmode = CCmode;
2984 else
2985 for (tmode = QImode; tmode != VOIDmode;
2986 tmode = GET_MODE_WIDER_MODE (tmode))
2987 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2988 break;
2989
2990 if (tmode == VOIDmode)
2991 abort ();
2992
2993 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2994 may call change_address which is not appropriate if we were
2995 called when a reload was in progress. We don't have to worry
2996 about changing the address since the size in bytes is supposed to
2997 be the same. Copy the MEM to change the mode and move any
2998 substitutions from the old MEM to the new one. */
2999
3000 if (reload_in_progress)
3001 {
3002 x = gen_lowpart_common (tmode, x1);
e16ceb8e 3003 if (x == 0 && MEM_P (x1))
8d94ba7c 3004 {
3005 x = adjust_address_nv (x1, tmode, 0);
3006 copy_replacements (x1, x);
3007 }
3008
3009 y = gen_lowpart_common (tmode, y1);
e16ceb8e 3010 if (y == 0 && MEM_P (y1))
8d94ba7c 3011 {
3012 y = adjust_address_nv (y1, tmode, 0);
3013 copy_replacements (y1, y);
3014 }
3015 }
3016 else
3017 {
3018 x = gen_lowpart (tmode, x);
3019 y = gen_lowpart (tmode, y);
3020 }
35cb5232 3021
8d94ba7c 3022 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3023 return emit_insn (GEN_FCN (insn_code) (x, y));
3024 }
3025
7be9cf34 3026 /* Try using a move pattern for the corresponding integer mode. This is
3027 only safe when simplify_subreg can convert MODE constants into integer
3028 constants. At present, it can only do this reliably if the value
3029 fits within a HOST_WIDE_INT. */
3030 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3031 && (submode = int_mode_for_mode (mode)) != BLKmode
3032 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3033 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3034 (simplify_gen_subreg (submode, x, mode, 0),
3035 simplify_gen_subreg (submode, y, mode, 0)));
3036
78defff5 3037 /* This will handle any multi-word or full-word mode that lacks a move_insn
3038 pattern. However, you will get better code if you define such patterns,
10f307d9 3039 even if they must turn into multiple assembler instructions. */
78defff5 3040 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
10f307d9 3041 {
3042 rtx last_insn = 0;
6702c250 3043 rtx seq, inner;
7f964718 3044 int need_clobber;
76ab50f8 3045 int i;
fa56dc1d 3046
498aec4e 3047#ifdef PUSH_ROUNDING
3048
3049 /* If X is a push on the stack, do the push now and replace
3050 X with a reference to the stack pointer. */
3051 if (push_operand (x, GET_MODE (x)))
3052 {
07c143fb 3053 rtx temp;
3054 enum rtx_code code;
ff385626 3055
07c143fb 3056 /* Do not use anti_adjust_stack, since we don't want to update
3057 stack_pointer_delta. */
3058 temp = expand_binop (Pmode,
3059#ifdef STACK_GROWS_DOWNWARD
3060 sub_optab,
3061#else
3062 add_optab,
3063#endif
3064 stack_pointer_rtx,
3065 GEN_INT
76ab50f8 3066 (PUSH_ROUNDING
3067 (GET_MODE_SIZE (GET_MODE (x)))),
92b7c66a 3068 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
76ab50f8 3069
ff385626 3070 if (temp != stack_pointer_rtx)
3071 emit_move_insn (stack_pointer_rtx, temp);
07c143fb 3072
3073 code = GET_CODE (XEXP (x, 0));
76ab50f8 3074
07c143fb 3075 /* Just hope that small offsets off SP are OK. */
3076 if (code == POST_INC)
ff385626 3077 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
76ab50f8 3078 GEN_INT (-((HOST_WIDE_INT)
3079 GET_MODE_SIZE (GET_MODE (x)))));
07c143fb 3080 else if (code == POST_DEC)
ff385626 3081 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
07c143fb 3082 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3083 else
3084 temp = stack_pointer_rtx;
3085
3086 x = change_address (x, VOIDmode, temp);
498aec4e 3087 }
3088#endif
fa56dc1d 3089
6702c250 3090 /* If we are in reload, see if either operand is a MEM whose address
3091 is scheduled for replacement. */
e16ceb8e 3092 if (reload_in_progress && MEM_P (x)
6702c250 3093 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
e4e86ec5 3094 x = replace_equiv_address_nv (x, inner);
e16ceb8e 3095 if (reload_in_progress && MEM_P (y)
6702c250 3096 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
e4e86ec5 3097 y = replace_equiv_address_nv (y, inner);
6702c250 3098
7f964718 3099 start_sequence ();
9cb64ebc 3100
7f964718 3101 need_clobber = 0;
10f307d9 3102 for (i = 0;
fa56dc1d 3103 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
10f307d9 3104 i++)
3105 {
3106 rtx xpart = operand_subword (x, i, 1, mode);
3107 rtx ypart = operand_subword (y, i, 1, mode);
3108
3109 /* If we can't get a part of Y, put Y into memory if it is a
3110 constant. Otherwise, force it into a register. If we still
3111 can't get a part of Y, abort. */
3112 if (ypart == 0 && CONSTANT_P (y))
3113 {
3114 y = force_const_mem (mode, y);
3115 ypart = operand_subword (y, i, 1, mode);
3116 }
3117 else if (ypart == 0)
3118 ypart = operand_subword_force (y, i, mode);
3119
3120 if (xpart == 0 || ypart == 0)
3121 abort ();
3122
7f964718 3123 need_clobber |= (GET_CODE (xpart) == SUBREG);
3124
10f307d9 3125 last_insn = emit_move_insn (xpart, ypart);
3126 }
dd0d17cd 3127
31d3e01c 3128 seq = get_insns ();
7f964718 3129 end_sequence ();
3130
3131 /* Show the output dies here. This is necessary for SUBREGs
3132 of pseudos since we cannot track their lifetimes correctly;
3133 hard regs shouldn't appear here except as return values.
3134 We never want to emit such a clobber after reload. */
3135 if (x != y
3136 && ! (reload_in_progress || reload_completed)
3137 && need_clobber != 0)
76ab50f8 3138 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
7f964718 3139
3140 emit_insn (seq);
3141
10f307d9 3142 return last_insn;
3143 }
3144 else
3145 abort ();
3146}
c0c4a46d 3147
3148/* If Y is representable exactly in a narrower mode, and the target can
3149 perform the extension directly from constant or memory, then emit the
3150 move as an extension. */
3151
3152static rtx
35cb5232 3153compress_float_constant (rtx x, rtx y)
c0c4a46d 3154{
3155 enum machine_mode dstmode = GET_MODE (x);
3156 enum machine_mode orig_srcmode = GET_MODE (y);
3157 enum machine_mode srcmode;
3158 REAL_VALUE_TYPE r;
3159
3160 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3161
3162 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3163 srcmode != orig_srcmode;
3164 srcmode = GET_MODE_WIDER_MODE (srcmode))
3165 {
3166 enum insn_code ic;
3167 rtx trunc_y, last_insn;
3168
3169 /* Skip if the target can't extend this way. */
3170 ic = can_extend_p (dstmode, srcmode, 0);
3171 if (ic == CODE_FOR_nothing)
3172 continue;
3173
3174 /* Skip if the narrowed value isn't exact. */
3175 if (! exact_real_truncate (srcmode, &r))
3176 continue;
3177
3178 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3179
3180 if (LEGITIMATE_CONSTANT_P (trunc_y))
3181 {
3182 /* Skip if the target needs extra instructions to perform
3183 the extension. */
3184 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3185 continue;
3186 }
3187 else if (float_extend_from_mem[dstmode][srcmode])
3188 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3189 else
3190 continue;
3191
3192 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3193 last_insn = get_last_insn ();
3194
8ad4c111 3195 if (REG_P (x))
6442675c 3196 set_unique_reg_note (last_insn, REG_EQUAL, y);
c0c4a46d 3197
3198 return last_insn;
3199 }
3200
3201 return NULL_RTX;
3202}
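
/* Example: on a target with a direct extendsfdf2 pattern, moving
   (const_double:DF 1.5) into a DFmode register can be emitted as an
   SFmode constant plus the extension, since 1.5 is exact in SFmode;
   a DFmode 0.1, by contrast, fails exact_real_truncate and is left
   for the ordinary move path.  */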
10f307d9 3203\f
3204/* Pushing data onto the stack. */
3205
3206/* Push a block of length SIZE (perhaps variable)
3207 and return an rtx to address the beginning of the block.
3208 Note that it is not possible for the value returned to be a QUEUED.
3209 The value may be virtual_outgoing_args_rtx.
3210
3211 EXTRA is the number of bytes of padding to push in addition to SIZE.
3212 BELOW nonzero means this padding comes at low addresses;
3213 otherwise, the padding comes at high addresses. */
3214
3215rtx
35cb5232 3216push_block (rtx size, int extra, int below)
10f307d9 3217{
19cb6b50 3218 rtx temp;
ed8d3eee 3219
3220 size = convert_modes (Pmode, ptr_mode, size, 1);
10f307d9 3221 if (CONSTANT_P (size))
3222 anti_adjust_stack (plus_constant (size, extra));
8ad4c111 3223 else if (REG_P (size) && extra == 0)
10f307d9 3224 anti_adjust_stack (size);
3225 else
3226 {
481feae3 3227 temp = copy_to_mode_reg (Pmode, size);
10f307d9 3228 if (extra != 0)
b572011e 3229 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
10f307d9 3230 temp, 0, OPTAB_LIB_WIDEN);
3231 anti_adjust_stack (temp);
3232 }
3233
4448f543 3234#ifndef STACK_GROWS_DOWNWARD
4448f543 3235 if (0)
4448f543 3236#else
3237 if (1)
10f307d9 3238#endif
4448f543 3239 {
4448f543 3240 temp = virtual_outgoing_args_rtx;
3241 if (extra != 0 && below)
3242 temp = plus_constant (temp, extra);
3243 }
3244 else
3245 {
3246 if (GET_CODE (size) == CONST_INT)
3247 temp = plus_constant (virtual_outgoing_args_rtx,
fa56dc1d 3248 -INTVAL (size) - (below ? 0 : extra));
4448f543 3249 else if (extra != 0 && !below)
3250 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
f7c44134 3251 negate_rtx (Pmode, plus_constant (size, extra)));
4448f543 3252 else
3253 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3254 negate_rtx (Pmode, size));
3255 }
10f307d9 3256
3257 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3258}
3259
fad4a30c 3260#ifdef PUSH_ROUNDING
3261
ef7dc4b4 3262/* Emit single push insn. */
fad4a30c 3263
ef7dc4b4 3264static void
35cb5232 3265emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
ef7dc4b4 3266{
ef7dc4b4 3267 rtx dest_addr;
07c143fb 3268 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
ef7dc4b4 3269 rtx dest;
675b92cc 3270 enum insn_code icode;
3271 insn_operand_predicate_fn pred;
ef7dc4b4 3272
675b92cc 3273 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3274 /* If there is a push pattern, use it. Otherwise try the old way of
 3275 throwing a MEM representing the push operation at the move expander. */
3276 icode = push_optab->handlers[(int) mode].insn_code;
3277 if (icode != CODE_FOR_nothing)
3278 {
3279 if (((pred = insn_data[(int) icode].operand[0].predicate)
e17f5b23 3280 && !((*pred) (x, mode))))
675b92cc 3281 x = force_reg (mode, x);
3282 emit_insn (GEN_FCN (icode) (x));
3283 return;
3284 }
ef7dc4b4 3285 if (GET_MODE_SIZE (mode) == rounded_size)
3286 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
20e1fca5 3287 /* If we are to pad downward, adjust the stack pointer first and
3288 then store X into the stack location using an offset. This is
3289 because emit_move_insn does not know how to pad; it does not have
3290 access to type. */
3291 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3292 {
3293 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3294 HOST_WIDE_INT offset;
3295
3296 emit_move_insn (stack_pointer_rtx,
3297 expand_binop (Pmode,
3298#ifdef STACK_GROWS_DOWNWARD
3299 sub_optab,
3300#else
3301 add_optab,
3302#endif
3303 stack_pointer_rtx,
3304 GEN_INT (rounded_size),
3305 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3306
3307 offset = (HOST_WIDE_INT) padding_size;
3308#ifdef STACK_GROWS_DOWNWARD
3309 if (STACK_PUSH_CODE == POST_DEC)
3310 /* We have already decremented the stack pointer, so get the
3311 previous value. */
3312 offset += (HOST_WIDE_INT) rounded_size;
3313#else
3314 if (STACK_PUSH_CODE == POST_INC)
3315 /* We have already incremented the stack pointer, so get the
3316 previous value. */
3317 offset -= (HOST_WIDE_INT) rounded_size;
3318#endif
3319 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3320 }
ef7dc4b4 3321 else
3322 {
3323#ifdef STACK_GROWS_DOWNWARD
20e1fca5 3324 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
ef7dc4b4 3325 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
e17f5b23 3326 GEN_INT (-(HOST_WIDE_INT) rounded_size));
ef7dc4b4 3327#else
20e1fca5 3328 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
ef7dc4b4 3329 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3330 GEN_INT (rounded_size));
3331#endif
3332 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3333 }
3334
3335 dest = gen_rtx_MEM (mode, dest_addr);
3336
ef7dc4b4 3337 if (type != 0)
3338 {
3339 set_mem_attributes (dest, type, 1);
a9d9ab08 3340
3341 if (flag_optimize_sibling_calls)
3342 /* Function incoming arguments may overlap with sibling call
3343 outgoing arguments and we cannot allow reordering of reads
3344 from function arguments with stores to outgoing arguments
3345 of sibling calls. */
3346 set_mem_alias_set (dest, 0);
ef7dc4b4 3347 }
3348 emit_move_insn (dest, x);
ef7dc4b4 3349}
fad4a30c 3350#endif
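/* Editor's note: a worked example of the downward-padding path in
   emit_single_push_insn, with invented numbers, plus a host-level
   sketch (guarded out of the build).  Pushing a 5-byte value when
   PUSH_ROUNDING rounds to 8 gives padding_size == 3: on a
   downward-growing stack the code emits sp -= 8 and stores at sp + 3,
   so the value sits at the high end of its slot with the padding
   below.  Per the comment in the code, POST_DEC means the stack
   pointer has already stepped past the slot, so rounded_size is added
   back to reach it.  */
#if 0
static long
padded_store_offset (long size, long rounded_size, int post_dec)
{
  long offset = rounded_size - size;   /* padding_size */
  if (post_dec)
    offset += rounded_size;            /* recover the previous sp */
  return offset;
}
#endif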
ef7dc4b4 3351
10f307d9 3352/* Generate code to push X onto the stack, assuming it has mode MODE and
3353 type TYPE.
3354 MODE is redundant except when X is a CONST_INT (since they don't
3355 carry mode info).
3356 SIZE is an rtx for the size of data to be copied (in bytes),
3357 needed only if X is BLKmode.
3358
decd7a45 3359 ALIGN (in bits) is maximum alignment we can assume.
10f307d9 3360
a984cc1e 3361 If PARTIAL and REG are both nonzero, then copy that many of the first
3362 words of X into registers starting with REG, and push the rest of X.
10f307d9 3363 The amount of space pushed is decreased by PARTIAL words,
3364 rounded *down* to a multiple of PARM_BOUNDARY.
3365 REG must be a hard register in this case.
a984cc1e 3366 If REG is zero but PARTIAL is not, take all other actions for an
3367 argument partially in registers, but do not actually load any
3368 registers.
10f307d9 3369
3370 EXTRA is the amount in bytes of extra space to leave next to this arg.
4bbea254 3371 This is ignored if an argument block has already been allocated.
10f307d9 3372
3373 On a machine that lacks real push insns, ARGS_ADDR is the address of
3374 the bottom of the argument block for this call. We use indexing off there
 3375 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
 3376 argument block has not been preallocated.
3377
997d68fe 3378 ARGS_SO_FAR is the size of args previously pushed for this call.
3379
3380 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3381 for arguments passed in registers. If nonzero, it will be the number
3382 of bytes required. */
10f307d9 3383
3384void
35cb5232 3385emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3386 unsigned int align, int partial, rtx reg, int extra,
3387 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3388 rtx alignment_pad)
10f307d9 3389{
3390 rtx xinner;
3391 enum direction stack_direction
3392#ifdef STACK_GROWS_DOWNWARD
3393 = downward;
3394#else
3395 = upward;
3396#endif
3397
3398 /* Decide where to pad the argument: `downward' for below,
3399 `upward' for above, or `none' for don't pad it.
3400 Default is below for small data on big-endian machines; else above. */
3401 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3402
ff385626 3403 /* Invert direction if stack is post-decrement.
12a97a04 3404 FIXME: why? */
3405 if (STACK_PUSH_CODE == POST_DEC)
10f307d9 3406 if (where_pad != none)
3407 where_pad = (where_pad == downward ? upward : downward);
3408
3409 xinner = x = protect_from_queue (x, 0);
3410
3411 if (mode == BLKmode)
3412 {
3413 /* Copy a block into the stack, entirely or partially. */
3414
19cb6b50 3415 rtx temp;
10f307d9 3416 int used = partial * UNITS_PER_WORD;
a2509aaa 3417 int offset;
10f307d9 3418 int skip;
fa56dc1d 3419
a2509aaa 3420 if (reg && GET_CODE (reg) == PARALLEL)
3421 {
3422 /* Use the size of the elt to compute offset. */
3423 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3424 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3425 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3426 }
3427 else
3428 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3429
10f307d9 3430 if (size == 0)
3431 abort ();
3432
3433 used -= offset;
3434
3435 /* USED is now the # of bytes we need not copy to the stack
3436 because registers will take care of them. */
3437
3438 if (partial != 0)
e513d163 3439 xinner = adjust_address (xinner, BLKmode, used);
10f307d9 3440
3441 /* If the partial register-part of the arg counts in its stack size,
3442 skip the part of stack space corresponding to the registers.
3443 Otherwise, start copying to the beginning of the stack space,
3444 by setting SKIP to 0. */
997d68fe 3445 skip = (reg_parm_stack_space == 0) ? 0 : used;
10f307d9 3446
3447#ifdef PUSH_ROUNDING
3448 /* Do it with several push insns if that doesn't take lots of insns
3449 and if there is no difficulty with push insns that skip bytes
3450 on the stack for alignment purposes. */
3451 if (args_addr == 0
4448f543 3452 && PUSH_ARGS
10f307d9 3453 && GET_CODE (size) == CONST_INT
3454 && skip == 0
b4ad0ea6 3455 && MEM_ALIGN (xinner) >= align
928a6bdc 3456 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
10f307d9 3457 /* Here we avoid the case of a structure whose weak alignment
3458 forces many pushes of a small amount of data,
3459 and such small pushes do rounding that causes trouble. */
9439ebf7 3460 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
325d1c45 3461 || align >= BIGGEST_ALIGNMENT
decd7a45 3462 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3463 == (align / BITS_PER_UNIT)))
10f307d9 3464 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3465 {
3466 /* Push padding now if padding above and stack grows down,
3467 or if padding below and stack grows up.
3468 But if space already allocated, this has already been done. */
3469 if (extra && args_addr == 0
3470 && where_pad != none && where_pad != stack_direction)
b572011e 3471 anti_adjust_stack (GEN_INT (extra));
10f307d9 3472
9fe0e1b8 3473 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
10f307d9 3474 }
3475 else
fa56dc1d 3476#endif /* PUSH_ROUNDING */
10f307d9 3477 {
a9f2963b 3478 rtx target;
3479
10f307d9 3480 /* Otherwise make space on the stack and copy the data
3481 to the address of that space. */
3482
3483 /* Deduct words put into registers from the size we must copy. */
3484 if (partial != 0)
3485 {
3486 if (GET_CODE (size) == CONST_INT)
b572011e 3487 size = GEN_INT (INTVAL (size) - used);
10f307d9 3488 else
3489 size = expand_binop (GET_MODE (size), sub_optab, size,
b572011e 3490 GEN_INT (used), NULL_RTX, 0,
3491 OPTAB_LIB_WIDEN);
10f307d9 3492 }
3493
3494 /* Get the address of the stack space.
3495 In this case, we do not deal with EXTRA separately.
3496 A single stack adjust will do. */
3497 if (! args_addr)
3498 {
3499 temp = push_block (size, extra, where_pad == downward);
3500 extra = 0;
3501 }
3502 else if (GET_CODE (args_so_far) == CONST_INT)
3503 temp = memory_address (BLKmode,
3504 plus_constant (args_addr,
3505 skip + INTVAL (args_so_far)));
3506 else
3507 temp = memory_address (BLKmode,
941522d6 3508 plus_constant (gen_rtx_PLUS (Pmode,
3509 args_addr,
3510 args_so_far),
10f307d9 3511 skip));
c0bfc78e 3512
3513 if (!ACCUMULATE_OUTGOING_ARGS)
3514 {
3515 /* If the source is referenced relative to the stack pointer,
3516 copy it to another register to stabilize it. We do not need
3517 to do this if we know that we won't be changing sp. */
3518
3519 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3520 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3521 temp = copy_to_reg (temp);
3522 }
3523
fa56dc1d 3524 target = gen_rtx_MEM (BLKmode, temp);
a9f2963b 3525
fa56dc1d 3526 if (type != 0)
3527 {
3528 set_mem_attributes (target, type, 1);
3529 /* Function incoming arguments may overlap with sibling call
3530 outgoing arguments and we cannot allow reordering of reads
3531 from function arguments with stores to outgoing arguments
3532 of sibling calls. */
ab6ab77e 3533 set_mem_alias_set (target, 0);
fa56dc1d 3534 }
c0bfc78e 3535
0378dbdc 3536 /* ALIGN may well be better aligned than TYPE, e.g. due to
3537 PARM_BOUNDARY. Assume the caller isn't lying. */
3538 set_mem_align (target, align);
c0bfc78e 3539
0378dbdc 3540 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
10f307d9 3541 }
3542 }
3543 else if (partial > 0)
3544 {
3545 /* Scalar partly in registers. */
3546
3547 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3548 int i;
3549 int not_stack;
3550 /* # words of start of argument
3551 that we must make space for but need not store. */
3552 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3553 int args_offset = INTVAL (args_so_far);
3554 int skip;
3555
3556 /* Push padding now if padding above and stack grows down,
3557 or if padding below and stack grows up.
3558 But if space already allocated, this has already been done. */
3559 if (extra && args_addr == 0
3560 && where_pad != none && where_pad != stack_direction)
b572011e 3561 anti_adjust_stack (GEN_INT (extra));
10f307d9 3562
3563 /* If we make space by pushing it, we might as well push
3564 the real data. Otherwise, we can leave OFFSET nonzero
3565 and leave the space uninitialized. */
3566 if (args_addr == 0)
3567 offset = 0;
3568
3569 /* Now NOT_STACK gets the number of words that we don't need to
3570 allocate on the stack. */
3571 not_stack = partial - offset;
3572
3573 /* If the partial register-part of the arg counts in its stack size,
3574 skip the part of stack space corresponding to the registers.
3575 Otherwise, start copying to the beginning of the stack space,
3576 by setting SKIP to 0. */
997d68fe 3577 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
10f307d9 3578
3579 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3580 x = validize_mem (force_const_mem (mode, x));
3581
3582 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3583 SUBREGs of such registers are not allowed. */
8ad4c111 3584 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
10f307d9 3585 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3586 x = copy_to_reg (x);
3587
3588 /* Loop over all the words allocated on the stack for this arg. */
3589 /* We can do it by words, because any scalar bigger than a word
 3590 has a size that is a multiple of a word. */
3591#ifndef PUSH_ARGS_REVERSED
3592 for (i = not_stack; i < size; i++)
3593#else
3594 for (i = size - 1; i >= not_stack; i--)
3595#endif
3596 if (i >= not_stack + offset)
3597 emit_push_insn (operand_subword_force (x, i, mode),
b572011e 3598 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3599 0, args_addr,
3600 GEN_INT (args_offset + ((i - not_stack + skip)
997d68fe 3601 * UNITS_PER_WORD)),
9d855d2f 3602 reg_parm_stack_space, alignment_pad);
10f307d9 3603 }
3604 else
3605 {
3606 rtx addr;
f7c44134 3607 rtx dest;
10f307d9 3608
3609 /* Push padding now if padding above and stack grows down,
3610 or if padding below and stack grows up.
3611 But if space already allocated, this has already been done. */
3612 if (extra && args_addr == 0
3613 && where_pad != none && where_pad != stack_direction)
b572011e 3614 anti_adjust_stack (GEN_INT (extra));
10f307d9 3615
3616#ifdef PUSH_ROUNDING
4448f543 3617 if (args_addr == 0 && PUSH_ARGS)
ef7dc4b4 3618 emit_single_push_insn (mode, x, type);
10f307d9 3619 else
3620#endif
eb4b06b6 3621 {
3622 if (GET_CODE (args_so_far) == CONST_INT)
3623 addr
3624 = memory_address (mode,
fa56dc1d 3625 plus_constant (args_addr,
eb4b06b6 3626 INTVAL (args_so_far)));
fa56dc1d 3627 else
941522d6 3628 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3629 args_so_far));
ef7dc4b4 3630 dest = gen_rtx_MEM (mode, addr);
3631 if (type != 0)
3632 {
3633 set_mem_attributes (dest, type, 1);
3634 /* Function incoming arguments may overlap with sibling call
3635 outgoing arguments and we cannot allow reordering of reads
3636 from function arguments with stores to outgoing arguments
3637 of sibling calls. */
ab6ab77e 3638 set_mem_alias_set (dest, 0);
ef7dc4b4 3639 }
10f307d9 3640
ef7dc4b4 3641 emit_move_insn (dest, x);
ef7dc4b4 3642 }
10f307d9 3643 }
3644
10f307d9 3645 /* If part should go in registers, copy that part
3646 into the appropriate registers. Do this now, at the end,
3647 since mem-to-mem copies above may do function calls. */
a984cc1e 3648 if (partial > 0 && reg != 0)
ce739127 3649 {
3650 /* Handle calls that pass values in multiple non-contiguous locations.
3651 The Irix 6 ABI has examples of this. */
3652 if (GET_CODE (reg) == PARALLEL)
5f4cd670 3653 emit_group_load (reg, x, type, -1);
ce739127 3654 else
3655 move_block_to_reg (REGNO (reg), x, partial, mode);
3656 }
10f307d9 3657
3658 if (extra && args_addr == 0 && where_pad == stack_direction)
b572011e 3659 anti_adjust_stack (GEN_INT (extra));
fa56dc1d 3660
364a85bd 3661 if (alignment_pad && args_addr == 0)
9d855d2f 3662 anti_adjust_stack (alignment_pad);
10f307d9 3663}
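/* Editor's note: a host-level sketch (guarded out of the build) of the
   USED/OFFSET split computed for a BLKmode argument above; the helper
   name is invented.  With PARTIAL == 3 words of 4 bytes and
   PARM_BOUNDARY == 64, USED starts at 12 and OFFSET == 12 % 8 == 4, so
   USED drops to 8: the registers cover the first 12 bytes, but only 8
   bytes of stack space are skipped, and the 4 overlap bytes are stored
   in both places to preserve the parameter alignment.  */
#if 0
static void
partial_arg_split (int partial, int units_per_word,
                   int parm_boundary_bits, int *used, int *offset)
{
  *used = partial * units_per_word;
  *offset = *used % (parm_boundary_bits / 8);
  *used -= *offset;
}
#endif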
3664\f
d8e5b213 3665/* Return X if X can be used as a subtarget in a sequence of arithmetic
3666 operations. */
3667
3668static rtx
35cb5232 3669get_subtarget (rtx x)
d8e5b213 3670{
3671 return ((x == 0
3672 /* Only registers can be subtargets. */
8ad4c111 3673 || !REG_P (x)
d8e5b213 3674 /* If the register is readonly, it can't be set more than once. */
3675 || RTX_UNCHANGING_P (x)
3676 /* Don't use hard regs to avoid extending their life. */
3677 || REGNO (x) < FIRST_PSEUDO_REGISTER
3678 /* Avoid subtargets inside loops,
3679 since they hide some invariant expressions. */
3680 || preserve_subexpressions_p ())
3681 ? 0 : x);
3682}
3683
10f307d9 3684/* Expand an assignment that stores the value of FROM into TO.
3685 If WANT_VALUE is nonzero, return an rtx for the value of TO.
9282409c 3686 (This may contain a QUEUED rtx;
3687 if the value is constant, this rtx is a constant.)
725cd5ad 3688 Otherwise, the returned value is NULL_RTX. */
10f307d9 3689
3690rtx
725cd5ad 3691expand_assignment (tree to, tree from, int want_value)
10f307d9 3692{
19cb6b50 3693 rtx to_rtx = 0;
10f307d9 3694 rtx result;
3695
3696 /* Don't crash if the lhs of the assignment was erroneous. */
3697
3698 if (TREE_CODE (to) == ERROR_MARK)
9282409c 3699 {
3700 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3701 return want_value ? result : NULL_RTX;
3702 }
10f307d9 3703
3704 /* Assignment of a structure component needs special treatment
3705 if the structure component's rtx is not simply a MEM.
e3a8913c 3706 Assignment of an array element at a constant index, and assignment of
3707 an array element in an unaligned packed structure field, has the same
3708 problem. */
10f307d9 3709
26e80911 3710 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2d55cbd9 3711 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3712 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
10f307d9 3713 {
3714 enum machine_mode mode1;
02e7a332 3715 HOST_WIDE_INT bitsize, bitpos;
2b96c5f6 3716 rtx orig_to_rtx;
954bdcb1 3717 tree offset;
10f307d9 3718 int unsignedp;
3719 int volatilep = 0;
88ac3f7f 3720 tree tem;
3721
3722 push_temp_slots ();
7fce34be 3723 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2b96c5f6 3724 &unsignedp, &volatilep);
10f307d9 3725
3726 /* If we are going to use store_bit_field and extract_bit_field,
3727 make sure to_rtx will be safe for multiple use. */
3728
3729 if (mode1 == VOIDmode && want_value)
3730 tem = stabilize_reference (tem);
3731
a689a61a 3732 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3733
954bdcb1 3734 if (offset != 0)
3735 {
fac6aae6 3736 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
954bdcb1 3737
e16ceb8e 3738 if (!MEM_P (to_rtx))
954bdcb1 3739 abort ();
33ef2f52 3740
33ef2f52 3741#ifdef POINTERS_EXTEND_UNSIGNED
479e4d5e 3742 if (GET_MODE (offset_rtx) != Pmode)
33402d67 3743 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4a836698 3744#else
3745 if (GET_MODE (offset_rtx) != ptr_mode)
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
33ef2f52 3747#endif
33ef2f52 3748
d89d783c 3749 /* A constant address in TO_RTX can have VOIDmode; we must not try
 3750 to call force_reg in that case, so avoid it. */
e16ceb8e 3751 if (MEM_P (to_rtx)
25d55d72 3752 && GET_MODE (to_rtx) == BLKmode
d89d783c 3753 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
2b96c5f6 3754 && bitsize > 0
fa56dc1d 3755 && (bitpos % bitsize) == 0
25d55d72 3756 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
2b96c5f6 3757 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
25d55d72 3758 {
fac6aae6 3759 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
25d55d72 3760 bitpos = 0;
3761 }
3762
fcdc122e 3763 to_rtx = offset_address (to_rtx, offset_rtx,
252d0e4d 3764 highest_pow2_factor_for_target (to,
3765 offset));
954bdcb1 3766 }
7014838c 3767
e16ceb8e 3768 if (MEM_P (to_rtx))
b10dbbca 3769 {
b10dbbca 3770 /* If the field is at offset zero, we could have been given the
3771 DECL_RTX of the parent struct. Don't munge it. */
3772 to_rtx = shallow_copy_rtx (to_rtx);
3773
6f717f77 3774 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
b10dbbca 3775 }
46652181 3776
2b96c5f6 3777 /* Deal with volatile and readonly fields. The former is only done
3778 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
e16ceb8e 3779 if (volatilep && MEM_P (to_rtx))
2b96c5f6 3780 {
3781 if (to_rtx == orig_to_rtx)
3782 to_rtx = copy_rtx (to_rtx);
3783 MEM_VOLATILE_P (to_rtx) = 1;
10f307d9 3784 }
3785
ad87de1e 3786 if (TREE_CODE (to) == COMPONENT_REF
9836f32e 3787 && TREE_READONLY (TREE_OPERAND (to, 1))
3788 /* We can't assert that a MEM won't be set more than once
3789 if the component is not addressable because another
3790 non-addressable component may be referenced by the same MEM. */
e16ceb8e 3791 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
ad87de1e 3792 {
2b96c5f6 3793 if (to_rtx == orig_to_rtx)
ad87de1e 3794 to_rtx = copy_rtx (to_rtx);
ad87de1e 3795 RTX_UNCHANGING_P (to_rtx) = 1;
3796 }
3797
e16ceb8e 3798 if (MEM_P (to_rtx) && ! can_address_p (to))
2b96c5f6 3799 {
3800 if (to_rtx == orig_to_rtx)
3801 to_rtx = copy_rtx (to_rtx);
3802 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3803 }
3804
bba9e08b 3805 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3806 mode. */
3807 while (0 && mode1 == VOIDmode && !want_value
9c5f26b0 3808 && bitpos + bitsize <= BITS_PER_WORD
3809 && bitsize < BITS_PER_WORD
3810 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3811 && !TREE_SIDE_EFFECTS (to)
3812 && !TREE_THIS_VOLATILE (to))
0717ec39 3813 {
9c5f26b0 3814 tree src, op0, op1;
0717ec39 3815 rtx value;
3816 HOST_WIDE_INT count = bitpos;
9c5f26b0 3817 optab binop;
3818
3819 src = from;
3820 STRIP_NOPS (src);
3821 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3822 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3823 break;
3824
3825 op0 = TREE_OPERAND (src, 0);
3826 op1 = TREE_OPERAND (src, 1);
3827 STRIP_NOPS (op0);
3828
3829 if (! operand_equal_p (to, op0, 0))
3830 break;
0717ec39 3831
3832 if (BYTES_BIG_ENDIAN)
3833 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3834
3835 /* Special case some bitfield op= exp. */
9c5f26b0 3836 switch (TREE_CODE (src))
0717ec39 3837 {
3838 case PLUS_EXPR:
3839 case MINUS_EXPR:
3840 if (count <= 0)
3841 break;
3842
3843 /* For now, just optimize the case of the topmost bitfield
9c5f26b0 3844 where we don't need to do any masking and also
3845 1 bit bitfields where xor can be used.
0717ec39 3846 We might win by one instruction for the other bitfields
3847 too if insv/extv instructions aren't used, so that
3848 can be added later. */
9c5f26b0 3849 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3850 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
0717ec39 3851 break;
9c5f26b0 3852 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
0717ec39 3853 value = protect_from_queue (value, 0);
3854 to_rtx = protect_from_queue (to_rtx, 1);
9c5f26b0 3855 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3856 if (bitsize == 1
3857 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3858 {
3859 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3860 NULL_RTX);
3861 binop = xor_optab;
3862 }
0717ec39 3863 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3864 value, build_int_2 (count, 0),
3865 NULL_RTX, 1);
9c5f26b0 3866 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
0717ec39 3867 value, to_rtx, 1, OPTAB_WIDEN);
3868 if (result != to_rtx)
3869 emit_move_insn (to_rtx, result);
3870 free_temp_slots ();
3871 pop_temp_slots ();
3872 return NULL_RTX;
3873 default:
3874 break;
3875 }
9c5f26b0 3876
3877 break;
0717ec39 3878 }
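      /* Editor's note, worked example for the topmost-bitfield case
         above: given `struct s { unsigned lo : 28; unsigned hi : 4; }'
         on a typical little-endian 32-bit ABI, HI occupies bits 28-31,
         so `x.hi += 9' can be emitted as a whole-word add of 9 << 28.
         Carries out of bit 31 are simply lost, which is exactly the
         modulo-16 wrap-around the 4-bit field requires, so no masking
         is needed.  The 1-bit case uses XOR because adding 1 to a
         1-bit field is the same as flipping it, and XOR never
         generates a carry into neighboring bits.  */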
3879
2b96c5f6 3880 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3881 (want_value
3882 /* Spurious cast for HPUX compiler. */
3883 ? ((enum machine_mode)
3884 TYPE_MODE (TREE_TYPE (to)))
3885 : VOIDmode),
3886 unsignedp, TREE_TYPE (tem), get_alias_set (to));
2e918804 3887
2b96c5f6 3888 preserve_temp_slots (result);
3889 free_temp_slots ();
3890 pop_temp_slots ();
2e918804 3891
2b96c5f6 3892 /* If the value is meaningful, convert RESULT to the proper mode.
3893 Otherwise, return nothing. */
3894 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3895 TYPE_MODE (TREE_TYPE (from)),
3896 result,
78a8ed03 3897 TYPE_UNSIGNED (TREE_TYPE (to)))
2b96c5f6 3898 : NULL_RTX);
10f307d9 3899 }
3900
a2e044a5 3901 /* If the rhs is a function call and its value is not an aggregate,
3902 call the function before we start to compute the lhs.
3903 This is needed for correct code for cases such as
3904 val = setjmp (buf) on machines where reference to val
e767499e 3905 requires loading up part of an address in a separate insn.
3906
16a8193d 3907 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3908 since it might be a promoted variable where the zero- or sign- extension
3909 needs to be done. Handling this in the normal way is safe because no
3910 computation is done before the call. */
45550790 3911 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
61b44857 3912 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
16a8193d 3913 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
8ad4c111 3914 && REG_P (DECL_RTL (to))))
a2e044a5 3915 {
88ac3f7f 3916 rtx value;
3917
3918 push_temp_slots ();
3919 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
a2e044a5 3920 if (to_rtx == 0)
8a06f2d4 3921 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
ac263f88 3922
ce739127 3923 /* Handle calls that return values in multiple non-contiguous locations.
3924 The Irix 6 ABI has examples of this. */
3925 if (GET_CODE (to_rtx) == PARALLEL)
5f4cd670 3926 emit_group_load (to_rtx, value, TREE_TYPE (from),
3927 int_size_in_bytes (TREE_TYPE (from)));
ce739127 3928 else if (GET_MODE (to_rtx) == BLKmode)
0378dbdc 3929 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
ac263f88 3930 else
5471b3be 3931 {
85d654dd 3932 if (POINTER_TYPE_P (TREE_TYPE (to)))
5471b3be 3933 value = convert_memory_address (GET_MODE (to_rtx), value);
5471b3be 3934 emit_move_insn (to_rtx, value);
3935 }
a2e044a5 3936 preserve_temp_slots (to_rtx);
3937 free_temp_slots ();
88ac3f7f 3938 pop_temp_slots ();
9282409c 3939 return want_value ? to_rtx : NULL_RTX;
a2e044a5 3940 }
3941
10f307d9 3942 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3943 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3944
3945 if (to_rtx == 0)
8a06f2d4 3946 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
10f307d9 3947
addbe7ac 3948 /* Don't move directly into a return register. */
155b05dc 3949 if (TREE_CODE (to) == RESULT_DECL
8ad4c111 3950 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
addbe7ac 3951 {
88ac3f7f 3952 rtx temp;
3953
3954 push_temp_slots ();
3955 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
155b05dc 3956
3957 if (GET_CODE (to_rtx) == PARALLEL)
5f4cd670 3958 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3959 int_size_in_bytes (TREE_TYPE (from)));
155b05dc 3960 else
3961 emit_move_insn (to_rtx, temp);
3962
addbe7ac 3963 preserve_temp_slots (to_rtx);
3964 free_temp_slots ();
88ac3f7f 3965 pop_temp_slots ();
9282409c 3966 return want_value ? to_rtx : NULL_RTX;
addbe7ac 3967 }
3968
10f307d9 3969 /* In case we are returning the contents of an object which overlaps
3970 the place the value is being stored, use a safe function when copying
3971 a value through a pointer into a structure value return block. */
3972 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3973 && current_function_returns_struct
3974 && !current_function_returns_pcc_struct)
3975 {
88ac3f7f 3976 rtx from_rtx, size;
3977
3978 push_temp_slots ();
eaf7767e 3979 size = expr_size (from);
8a06f2d4 3980 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
10f307d9 3981
f896c932 3982 emit_library_call (memmove_libfunc, LCT_NORMAL,
3983 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3984 XEXP (from_rtx, 0), Pmode,
3985 convert_to_mode (TYPE_MODE (sizetype),
3986 size, TYPE_UNSIGNED (sizetype)),
3987 TYPE_MODE (sizetype));
10f307d9 3988
3989 preserve_temp_slots (to_rtx);
3990 free_temp_slots ();
88ac3f7f 3991 pop_temp_slots ();
9282409c 3992 return want_value ? to_rtx : NULL_RTX;
10f307d9 3993 }
3994
3995 /* Compute FROM and store the value in the rtx we got. */
3996
88ac3f7f 3997 push_temp_slots ();
10f307d9 3998 result = store_expr (from, to_rtx, want_value);
3999 preserve_temp_slots (result);
4000 free_temp_slots ();
88ac3f7f 4001 pop_temp_slots ();
9282409c 4002 return want_value ? result : NULL_RTX;
10f307d9 4003}
4004
4005/* Generate code for computing expression EXP,
4006 and storing the value into TARGET.
10f307d9 4007 TARGET may contain a QUEUED rtx.
4008
a35a63ff 4009 If WANT_VALUE & 1 is nonzero, return a copy of the value
9282409c 4010 not in TARGET, so that we can be sure to use the proper
4011 value in a containing expression even if TARGET has something
4012 else stored in it. If possible, we copy the value through a pseudo
4013 and return that pseudo. Or, if the value is constant, we try to
4014 return the constant. In some cases, we return a pseudo
4015 copied *from* TARGET.
4016
4017 If the mode is BLKmode then we may return TARGET itself.
 4018 It turns out that in BLKmode it doesn't cause a problem,
4019 because C has no operators that could combine two different
4020 assignments into the same BLKmode object with different values
4021 with no sequence point. Will other languages need this to
4022 be more thorough?
4023
a35a63ff 4024 If WANT_VALUE & 1 is 0, we return NULL, to make sure
9282409c 4025 to catch quickly any cases where the caller uses the value
a35a63ff 4026 and fails to set WANT_VALUE.
4027
4028 If WANT_VALUE & 2 is set, this is a store into a call param on the
4029 stack, and block moves may need to be treated specially. */
10f307d9 4030
4031rtx
35cb5232 4032store_expr (tree exp, rtx target, int want_value)
10f307d9 4033{
19cb6b50 4034 rtx temp;
60ffaf4d 4035 rtx alt_rtl = NULL_RTX;
c0d93299 4036 rtx mark = mark_queue ();
10f307d9 4037 int dont_return_target = 0;
afadb0ab 4038 int dont_store_target = 0;
10f307d9 4039
824638f9 4040 if (VOID_TYPE_P (TREE_TYPE (exp)))
4041 {
4042 /* C++ can generate ?: expressions with a throw expression in one
4043 branch and an rvalue in the other. Here, we resolve attempts to
917bbcab 4044 store the throw expression's nonexistent result. */
824638f9 4045 if (want_value)
4046 abort ();
4047 expand_expr (exp, const0_rtx, VOIDmode, 0);
4048 return NULL_RTX;
4049 }
10f307d9 4050 if (TREE_CODE (exp) == COMPOUND_EXPR)
4051 {
4052 /* Perform first part of compound expression, then assign from second
4053 part. */
a35a63ff 4054 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4055 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
10f307d9 4056 emit_queue ();
9282409c 4057 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
10f307d9 4058 }
4059 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4060 {
4061 /* For conditional expression, get safe form of the target. Then
4062 test the condition, doing the appropriate assignment on either
4063 side. This avoids the creation of unnecessary temporaries.
4064 For non-BLKmode, it is more efficient not to do this. */
4065
4066 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4067
4068 emit_queue ();
4069 target = protect_from_queue (target, 1);
4070
d07f1b1f 4071 do_pending_stack_adjust ();
10f307d9 4072 NO_DEFER_POP;
4073 jumpifnot (TREE_OPERAND (exp, 0), lab1);
ad87de1e 4074 start_cleanup_deferral ();
a35a63ff 4075 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
ad87de1e 4076 end_cleanup_deferral ();
10f307d9 4077 emit_queue ();
4078 emit_jump_insn (gen_jump (lab2));
4079 emit_barrier ();
4080 emit_label (lab1);
ad87de1e 4081 start_cleanup_deferral ();
a35a63ff 4082 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
ad87de1e 4083 end_cleanup_deferral ();
10f307d9 4084 emit_queue ();
4085 emit_label (lab2);
4086 OK_DEFER_POP;
9012f57d 4087
a35a63ff 4088 return want_value & 1 ? target : NULL_RTX;
10f307d9 4089 }
10f307d9 4090 else if (queued_subexp_p (target))
9282409c 4091 /* If target contains a postincrement, let's not risk
4092 using it as the place to generate the rhs. */
10f307d9 4093 {
4094 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4095 {
4096 /* Expand EXP into a new pseudo. */
4097 temp = gen_reg_rtx (GET_MODE (target));
a35a63ff 4098 temp = expand_expr (exp, temp, GET_MODE (target),
4099 (want_value & 2
4100 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
10f307d9 4101 }
4102 else
a35a63ff 4103 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4104 (want_value & 2
4105 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
9282409c 4106
4107 /* If target is volatile, ANSI requires accessing the value
4108 *from* the target, if it is accessed. So make that happen.
4109 In no case return the target itself. */
a35a63ff 4110 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
9282409c 4111 dont_return_target = 1;
10f307d9 4112 }
a35a63ff 4113 else if ((want_value & 1) != 0
e16ceb8e 4114 && MEM_P (target)
a35a63ff 4115 && ! MEM_VOLATILE_P (target)
bb11bacb 4116 && GET_MODE (target) != BLKmode)
4117 /* If target is in memory and caller wants value in a register instead,
4118 arrange that. Pass TARGET as target for expand_expr so that,
4119 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4120 We know expand_expr will not use the target in that case.
4121 Don't do this if TARGET is volatile because we are supposed
4122 to write it and then read it. */
4123 {
a35a63ff 4124 temp = expand_expr (exp, target, GET_MODE (target),
4125 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bb11bacb 4126 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
afadb0ab 4127 {
4128 /* If TEMP is already in the desired TARGET, only copy it from
4129 memory and don't store it there again. */
4130 if (temp == target
4131 || (rtx_equal_p (temp, target)
4132 && ! side_effects_p (temp) && ! side_effects_p (target)))
4133 dont_store_target = 1;
4134 temp = copy_to_reg (temp);
4135 }
bb11bacb 4136 dont_return_target = 1;
4137 }
acfb31e5 4138 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
edc2a478 4139 /* If this is a scalar in a register that is stored in a wider mode
acfb31e5 4140 than the declared mode, compute the result into its declared mode
4141 and then convert to the wider mode. Our value is the computed
4142 expression. */
4143 {
d2422fc2 4144 rtx inner_target = 0;
4145
256749c3 4146 /* If we don't want a value, we can do the conversion inside EXP,
8d426db9 4147 which will often result in some optimizations. Do the conversion
4148 in two steps: first change the signedness, if needed, then
74086fde 4149 the extend. But don't do this if the type of EXP is a subtype
4150 of something else since then the conversion might involve
4151 more than just converting modes. */
a35a63ff 4152 if ((want_value & 1) == 0
4153 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
74086fde 4154 && TREE_TYPE (TREE_TYPE (exp)) == 0)
8d426db9 4155 {
78a8ed03 4156 if (TYPE_UNSIGNED (TREE_TYPE (exp))
8d426db9 4157 != SUBREG_PROMOTED_UNSIGNED_P (target))
4070745f 4158 exp = convert
dc24ddbd 4159 (lang_hooks.types.signed_or_unsigned_type
4070745f 4160 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
8d426db9 4161
dc24ddbd 4162 exp = convert (lang_hooks.types.type_for_mode
771d21fa 4163 (GET_MODE (SUBREG_REG (target)),
4164 SUBREG_PROMOTED_UNSIGNED_P (target)),
8d426db9 4165 exp);
d2422fc2 4166
4167 inner_target = SUBREG_REG (target);
8d426db9 4168 }
fa56dc1d 4169
a35a63ff 4170 temp = expand_expr (exp, inner_target, VOIDmode,
4171 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
ceefa980 4172
ae587726 4173 /* If TEMP is a MEM and we want a result value, make the access
35cb5232 4174 now so it gets done only once. Strictly speaking, this is
4175 only necessary if the MEM is volatile, or if the address
ae587726 4176 overlaps TARGET. But not performing the load twice also
4177 reduces the amount of rtl we generate and then have to CSE. */
e16ceb8e 4178 if (MEM_P (temp) && (want_value & 1) != 0)
eb9d8626 4179 temp = copy_to_reg (temp);
4180
ceefa980 4181 /* If TEMP is a VOIDmode constant, use convert_modes to make
4182 sure that we properly convert it. */
4183 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
c3ba908e 4184 {
4185 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4186 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4187 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4188 GET_MODE (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4190 }
ceefa980 4191
acfb31e5 4192 convert_move (SUBREG_REG (target), temp,
4193 SUBREG_PROMOTED_UNSIGNED_P (target));
28ad8d33 4194
4195 /* If we promoted a constant, change the mode back down to match
4196 target. Otherwise, the caller might get confused by a result whose
4197 mode is larger than expected. */
4198
a35a63ff 4199 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
28ad8d33 4200 {
f06d0bb1 4201 if (GET_MODE (temp) != VOIDmode)
4202 {
4203 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4204 SUBREG_PROMOTED_VAR_P (temp) = 1;
ff385626 4205 SUBREG_PROMOTED_UNSIGNED_SET (temp,
bfd242e8 4206 SUBREG_PROMOTED_UNSIGNED_P (target));
f06d0bb1 4207 }
4208 else
4209 temp = convert_modes (GET_MODE (target),
4210 GET_MODE (SUBREG_REG (target)),
4211 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
28ad8d33 4212 }
4213
a35a63ff 4214 return want_value & 1 ? temp : NULL_RTX;
acfb31e5 4215 }
10f307d9 4216 else
4217 {
60ffaf4d 4218 temp = expand_expr_real (exp, target, GET_MODE (target),
4219 (want_value & 2
4220 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4221 &alt_rtl);
eb9d8626 4222 /* Return TARGET if it's a specified hardware register.
9282409c 4223 If TARGET is a volatile mem ref, either return TARGET
4224 or return a reg copied *from* TARGET; ANSI requires this.
4225
4226 Otherwise, if TEMP is not TARGET, return TEMP
4227 if it is constant (for efficiency),
4228 or if we really want the correct value. */
8ad4c111 4229 if (!(target && REG_P (target)
10f307d9 4230 && REGNO (target) < FIRST_PSEUDO_REGISTER)
e16ceb8e 4231 && !(MEM_P (target) && MEM_VOLATILE_P (target))
46652181 4232 && ! rtx_equal_p (temp, target)
a35a63ff 4233 && (CONSTANT_P (temp) || (want_value & 1) != 0))
10f307d9 4234 dont_return_target = 1;
4235 }
4236
ceefa980 4237 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4238 the same as that of TARGET, adjust the constant. This is needed, for
4239 example, in case it is a CONST_DOUBLE and we want only a word-sized
4240 value. */
4241 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
43769aba 4242 && TREE_CODE (exp) != ERROR_MARK
ceefa980 4243 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4244 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
78a8ed03 4245 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
ceefa980 4246
10f307d9 4247 /* If value was not generated in the target, store it there.
c0d93299 4248 Convert the value to TARGET's type first if necessary and emit the
 4249 pending increments that have been queued when expanding EXP.
4250 Note that we cannot emit the whole queue blindly because this will
4251 effectively disable the POST_INC optimization later.
4252
8a06f2d4 4253 If TEMP and TARGET compare equal according to rtx_equal_p, but
14e396bb 4254 one or both of them are volatile memory refs, we have to distinguish
4255 two cases:
4256 - expand_expr has used TARGET. In this case, we must not generate
4257 another copy. This can be detected by TARGET being equal according
4258 to == .
4259 - expand_expr has not used TARGET - that means that the source just
4260 happens to have the same RTX form. Since temp will have been created
4261 by expand_expr, it will compare unequal according to == .
4262 We must generate a copy in this case, to reach the correct number
4263 of volatile memory references. */
10f307d9 4264
b1ba8c8b 4265 if ((! rtx_equal_p (temp, target)
14e396bb 4266 || (temp != target && (side_effects_p (temp)
4267 || side_effects_p (target))))
afadb0ab 4268 && TREE_CODE (exp) != ERROR_MARK
6db2b7ab 4269 && ! dont_store_target
72a64688 4270 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
 4271 but TARGET is not a valid memory reference, TEMP will differ
4272 from TARGET although it is really the same location. */
60ffaf4d 4273 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
d18d957a 4274 /* If there's nothing to copy, don't bother. Don't call expr_size
 4275 unless necessary, because the expr_size hook of some front ends (C++)
4276 aborts on objects that are not supposed to be bit-copied or
4277 bit-initialized. */
4278 && expr_size (exp) != const0_rtx)
10f307d9 4279 {
c0d93299 4280 emit_insns_enqueued_after_mark (mark);
10f307d9 4281 target = protect_from_queue (target, 1);
5e4778f4 4282 temp = protect_from_queue (temp, 0);
10f307d9 4283 if (GET_MODE (temp) != GET_MODE (target)
d0ddddf7 4284 && GET_MODE (temp) != VOIDmode)
10f307d9 4285 {
78a8ed03 4286 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
10f307d9 4287 if (dont_return_target)
4288 {
4289 /* In this case, we will return TEMP,
4290 so make sure it has the proper mode.
4291 But don't forget to store the value into TARGET. */
4292 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4293 emit_move_insn (target, temp);
4294 }
4295 else
4296 convert_move (target, temp, unsignedp);
4297 }
4298
4299 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4300 {
18279aee 4301 /* Handle copying a string constant into an array. The string
4302 constant may be shorter than the array. So copy just the string's
4303 actual length, and clear the rest. First get the size of the data
4304 type of the string, which is actually the size of the target. */
4305 rtx size = expr_size (exp);
10f307d9 4306
35f44ac1 4307 if (GET_CODE (size) == CONST_INT
4308 && INTVAL (size) < TREE_STRING_LENGTH (exp))
a35a63ff 4309 emit_block_move (target, temp, size,
4310 (want_value & 2
4311 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 4312 else
10f307d9 4313 {
35f44ac1 4314 /* Compute the size of the data to copy from the string. */
4315 tree copy_size
3586684a 4316 = size_binop (MIN_EXPR,
c869557a 4317 make_tree (sizetype, size),
902de8ed 4318 size_int (TREE_STRING_LENGTH (exp)));
a35a63ff 4319 rtx copy_size_rtx
4320 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4321 (want_value & 2
4322 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
35f44ac1 4323 rtx label = 0;
4324
4325 /* Copy that much. */
33402d67 4326 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
78a8ed03 4327 TYPE_UNSIGNED (sizetype));
a35a63ff 4328 emit_block_move (target, temp, copy_size_rtx,
4329 (want_value & 2
4330 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
35f44ac1 4331
ed8d3eee 4332 /* Figure out how much is left in TARGET that we have to clear.
4333 Do all calculations in ptr_mode. */
35f44ac1 4334 if (GET_CODE (copy_size_rtx) == CONST_INT)
4335 {
18279aee 4336 size = plus_constant (size, -INTVAL (copy_size_rtx));
4337 target = adjust_address (target, BLKmode,
4338 INTVAL (copy_size_rtx));
35f44ac1 4339 }
4340 else
4341 {
4a836698 4342 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
b572011e 4343 copy_size_rtx, NULL_RTX, 0,
4344 OPTAB_LIB_WIDEN);
35f44ac1 4345
18279aee 4346#ifdef POINTERS_EXTEND_UNSIGNED
4347 if (GET_MODE (copy_size_rtx) != Pmode)
33402d67 4348 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
78a8ed03 4349 TYPE_UNSIGNED (sizetype));
18279aee 4350#endif
4351
4352 target = offset_address (target, copy_size_rtx,
4353 highest_pow2_factor (copy_size));
35f44ac1 4354 label = gen_label_rtx ();
5a894bc6 4355 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
2b96c5f6 4356 GET_MODE (size), 0, label);
35f44ac1 4357 }
4358
4359 if (size != const0_rtx)
8a06f2d4 4360 clear_storage (target, size);
bdf60b71 4361
35f44ac1 4362 if (label)
4363 emit_label (label);
10f307d9 4364 }
4365 }
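          /* Editor's note, worked example: for `char buf[8] = "hi";'
             the string constant is 3 bytes long (for C strings,
             TREE_STRING_LENGTH counts the terminating nul), so the
             block move above copies 3 bytes and clear_storage zeros
             the remaining 5.  */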
ce739127 4366 /* Handle calls that return values in multiple non-contiguous locations.
4367 The Irix 6 ABI has examples of this. */
4368 else if (GET_CODE (target) == PARALLEL)
5f4cd670 4369 emit_group_load (target, temp, TREE_TYPE (exp),
4370 int_size_in_bytes (TREE_TYPE (exp)));
10f307d9 4371 else if (GET_MODE (temp) == BLKmode)
a35a63ff 4372 emit_block_move (target, temp, expr_size (exp),
4373 (want_value & 2
4374 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10f307d9 4375 else
828eae76 4376 {
4377 temp = force_operand (temp, target);
4378 if (temp != target)
4379 emit_move_insn (target, temp);
4380 }
10f307d9 4381 }
9282409c 4382
eb9d8626 4383 /* If we don't want a value, return NULL_RTX. */
a35a63ff 4384 if ((want_value & 1) == 0)
eb9d8626 4385 return NULL_RTX;
4386
4387 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4388 ??? The latter test doesn't seem to make sense. */
e16ceb8e 4389 else if (dont_return_target && !MEM_P (temp))
10f307d9 4390 return temp;
eb9d8626 4391
4392 /* Return TARGET itself if it is a hard register. */
a35a63ff 4393 else if ((want_value & 1) != 0
4394 && GET_MODE (target) != BLKmode
8ad4c111 4395 && ! (REG_P (target)
eb9d8626 4396 && REGNO (target) < FIRST_PSEUDO_REGISTER))
9282409c 4397 return copy_to_reg (target);
fa56dc1d 4398
eb9d8626 4399 else
9282409c 4400 return target;
10f307d9 4401}
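/* Editor's note: a sketch, guarded out of the build, of the WANT_VALUE
   bit convention documented above; the wrapper name is invented.  Bit 0
   asks for the value back, bit 1 marks a store into a call parameter
   on the stack.  */
#if 0
static void
store_expr_flags_sketch (tree exp, rtx target)
{
  rtx v;

  v = store_expr (exp, target, 1);      /* value wanted back */
  store_expr (exp, target, 0);          /* value not wanted */
  store_expr (exp, target, 2);          /* call-parameter store */
  v = store_expr (exp, target, 1 | 2);  /* both */
}
#endif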
4402\f
365db11e 4403/* Examine CTOR. Discover how many scalar fields are set to nonzero
4ee9c684 4404 values and place the count in *P_NZ_ELTS. Discover how many scalar
 4405 fields are set to non-constant values and place that count in *P_NC_ELTS. */
dbd14dc5 4406
4ee9c684 4407static void
4408categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4409 HOST_WIDE_INT *p_nc_elts)
dbd14dc5 4410{
4ee9c684 4411 HOST_WIDE_INT nz_elts, nc_elts;
4412 tree list;
dbd14dc5 4413
4ee9c684 4414 nz_elts = 0;
4415 nc_elts = 0;
4416
4417 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
dbd14dc5 4418 {
4ee9c684 4419 tree value = TREE_VALUE (list);
4420 tree purpose = TREE_PURPOSE (list);
4421 HOST_WIDE_INT mult;
dbd14dc5 4422
4ee9c684 4423 mult = 1;
4424 if (TREE_CODE (purpose) == RANGE_EXPR)
4425 {
4426 tree lo_index = TREE_OPERAND (purpose, 0);
4427 tree hi_index = TREE_OPERAND (purpose, 1);
dbd14dc5 4428
4ee9c684 4429 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4430 mult = (tree_low_cst (hi_index, 1)
4431 - tree_low_cst (lo_index, 1) + 1);
4432 }
dbd14dc5 4433
4ee9c684 4434 switch (TREE_CODE (value))
4435 {
4436 case CONSTRUCTOR:
4437 {
4438 HOST_WIDE_INT nz = 0, nc = 0;
4439 categorize_ctor_elements_1 (value, &nz, &nc);
4440 nz_elts += mult * nz;
4441 nc_elts += mult * nc;
4442 }
4443 break;
dbd14dc5 4444
4ee9c684 4445 case INTEGER_CST:
4446 case REAL_CST:
4447 if (!initializer_zerop (value))
4448 nz_elts += mult;
4449 break;
4450 case COMPLEX_CST:
4451 if (!initializer_zerop (TREE_REALPART (value)))
4452 nz_elts += mult;
4453 if (!initializer_zerop (TREE_IMAGPART (value)))
4454 nz_elts += mult;
4455 break;
4456 case VECTOR_CST:
4457 {
4458 tree v;
4459 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4460 if (!initializer_zerop (TREE_VALUE (v)))
4461 nz_elts += mult;
4462 }
4463 break;
886cfd4f 4464
4ee9c684 4465 default:
4466 nz_elts += mult;
4467 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4468 nc_elts += mult;
4469 break;
4470 }
4471 }
886cfd4f 4472
4ee9c684 4473 *p_nz_elts += nz_elts;
4474 *p_nc_elts += nc_elts;
4475}
4476
4477void
4478categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4479 HOST_WIDE_INT *p_nc_elts)
4480{
4481 *p_nz_elts = 0;
4482 *p_nc_elts = 0;
4483 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4484}
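/* Editor's note: a usage sketch, guarded out of the build.  For a
   constructor written in C as `{ 1, 0, 2.5, 0 }' the counts come back
   as nz_elts == 2 and nc_elts == 0; replacing 2.5 with a non-constant
   expression would make nc_elts 1 as well.  The quarter threshold
   below mirrors mostly_zeros_p.  */
#if 0
static int
ctor_mostly_zeros_sketch (tree ctor)
{
  HOST_WIDE_INT nz_elts, nc_elts;

  categorize_ctor_elements (ctor, &nz_elts, &nc_elts);
  return nz_elts < count_type_elements (TREE_TYPE (ctor)) / 4;
}
#endif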
4485
 4486/* Count the number of scalars in TYPE. Return -1 on overflow or
 4487 if TYPE is variable-sized. */
4488
4489HOST_WIDE_INT
4490count_type_elements (tree type)
4491{
4492 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4493 switch (TREE_CODE (type))
4494 {
4495 case ARRAY_TYPE:
4496 {
4497 tree telts = array_type_nelts (type);
4498 if (telts && host_integerp (telts, 1))
4499 {
 4500 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4501 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4502 if (n == 0)
4503 return 0;
 4504 if (max / n > m)
4505 return n * m;
4506 }
4507 return -1;
4508 }
4509
4510 case RECORD_TYPE:
4511 {
4512 HOST_WIDE_INT n = 0, t;
4513 tree f;
4514
4515 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4516 if (TREE_CODE (f) == FIELD_DECL)
4517 {
4518 t = count_type_elements (TREE_TYPE (f));
4519 if (t < 0)
4520 return -1;
4521 n += t;
4522 }
4523
4524 return n;
4525 }
dbd14dc5 4526
4ee9c684 4527 case UNION_TYPE:
4528 case QUAL_UNION_TYPE:
4529 {
4530 /* Ho hum. How in the world do we guess here? Clearly it isn't
4531 right to count the fields. Guess based on the number of words. */
4532 HOST_WIDE_INT n = int_size_in_bytes (type);
4533 if (n < 0)
4534 return -1;
4535 return n / UNITS_PER_WORD;
4536 }
4537
4538 case COMPLEX_TYPE:
4539 return 2;
4540
4541 case VECTOR_TYPE:
 4542 /* ??? This is broken. We should encode the vector width in the tree. */
4543 return GET_MODE_NUNITS (TYPE_MODE (type));
4544
4545 case INTEGER_TYPE:
4546 case REAL_TYPE:
4547 case ENUMERAL_TYPE:
4548 case BOOLEAN_TYPE:
4549 case CHAR_TYPE:
4550 case POINTER_TYPE:
4551 case OFFSET_TYPE:
4552 case REFERENCE_TYPE:
dbd14dc5 4553 return 1;
fa56dc1d 4554
4ee9c684 4555 case VOID_TYPE:
4556 case METHOD_TYPE:
4557 case FILE_TYPE:
4558 case SET_TYPE:
4559 case FUNCTION_TYPE:
4560 case LANG_TYPE:
0dbd1c74 4561 default:
4ee9c684 4562 abort ();
dbd14dc5 4563 }
dbd14dc5 4564}
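/* Editor's note, worked example: for the C type
     struct { int a; double b[3]; _Complex float c; }
   the walk above counts 1 + 3 * 1 + 2 == 6 scalars.  Unions fall back
   to a size-in-words guess, and a variable-sized array makes the
   whole answer -1.  */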
4565
 4566/* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4567
957697db 4568int
35cb5232 4569mostly_zeros_p (tree exp)
dbd14dc5 4570{
dbd14dc5 4571 if (TREE_CODE (exp) == CONSTRUCTOR)
4ee9c684 4572
dbd14dc5 4573 {
4ee9c684 4574 HOST_WIDE_INT nz_elts, nc_elts, elts;
4575
4576 /* If there are no ranges of true bits, it is all zero. */
e7ef3ff2 4577 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4ee9c684 4578 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4579
4580 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4581 elts = count_type_elements (TREE_TYPE (exp));
dbd14dc5 4582
4ee9c684 4583 return nz_elts < elts / 4;
dbd14dc5 4584 }
4585
4ee9c684 4586 return initializer_zerop (exp);
dbd14dc5 4587}
4588\f
e7ef3ff2 4589/* Helper function for store_constructor.
4590 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4591 TYPE is the type of the CONSTRUCTOR, not the element type.
2c269e73 4592 CLEARED is as for store_constructor.
1179a68b 4593 ALIAS_SET is the alias set to use for any stores.
a5b7fc8b 4594
4595 This provides a recursive shortcut back to store_constructor when it isn't
4596 necessary to go through store_field. This is so that we can pass through
4597 the cleared field to let store_constructor know that we may not have to
4598 clear a substructure if the outer structure has already been cleared. */
e7ef3ff2 4599
4600static void
35cb5232 4601store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4602 HOST_WIDE_INT bitpos, enum machine_mode mode,
4603 tree exp, tree type, int cleared, int alias_set)
e7ef3ff2 4604{
4605 if (TREE_CODE (exp) == CONSTRUCTOR
a6645eae 4606 /* We can only call store_constructor recursively if the size and
4607 bit position are on a byte boundary. */
a5b7fc8b 4608 && bitpos % BITS_PER_UNIT == 0
a6645eae 4609 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6ef828f9 4610 /* If we have a nonzero bitpos for a register target, then we just
a5b7fc8b 4611 let store_field do the bitfield handling. This is unlikely to
 4612 generate unnecessary clear instructions anyway. */
e16ceb8e 4613 && (bitpos == 0 || MEM_P (target)))
e7ef3ff2 4614 {
e16ceb8e 4615 if (MEM_P (target))
459b8611 4616 target
4617 = adjust_address (target,
4618 GET_MODE (target) == BLKmode
4619 || 0 != (bitpos
4620 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4621 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
1179a68b 4622
5b90bb08 4623
2c269e73 4624 /* Update the alias set, if required. */
e16ceb8e 4625 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5cc193e7 4626 && MEM_ALIAS_SET (target) != 0)
86ce88aa 4627 {
4628 target = copy_rtx (target);
4629 set_mem_alias_set (target, alias_set);
4630 }
5b90bb08 4631
e792f237 4632 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e7ef3ff2 4633 }
4634 else
2b96c5f6 4635 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4636 alias_set);
e7ef3ff2 4637}
4638
10f307d9 4639/* Store the value of constructor EXP into the rtx TARGET.
2c269e73 4640 TARGET is either a REG or a MEM; we know it cannot conflict, since
4641 safe_from_p has been called.
e792f237 4642 CLEARED is true if TARGET is known to have been zeroed.
4643 SIZE is the number of bytes of TARGET we are allowed to modify: this
a316ea6a 4644 may not be the same as the size of EXP if we are assigning to a field
4645 which has been packed to exclude padding bits. */
10f307d9 4646
4647static void
35cb5232 4648store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
10f307d9 4649{
2ef1e405 4650 tree type = TREE_TYPE (exp);
0bf16c4a 4651#ifdef WORD_REGISTER_OPERATIONS
3a6656ad 4652 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
0bf16c4a 4653#endif
2ef1e405 4654
34f17b00 4655 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4656 || TREE_CODE (type) == QUAL_UNION_TYPE)
10f307d9 4657 {
19cb6b50 4658 tree elt;
10f307d9 4659
e792f237 4660 /* If size is zero or the target is already cleared, do nothing. */
4661 if (size == 0 || cleared)
1d881c02 4662 cleared = 1;
2c269e73 4663 /* We either clear the aggregate or indicate the value is dead. */
1d881c02 4664 else if ((TREE_CODE (type) == UNION_TYPE
4665 || TREE_CODE (type) == QUAL_UNION_TYPE)
4666 && ! CONSTRUCTOR_ELTS (exp))
2c269e73 4667 /* If the constructor is empty, clear the union. */
226c8875 4668 {
e792f237 4669 clear_storage (target, expr_size (exp));
2c269e73 4670 cleared = 1;
226c8875 4671 }
2ef1e405 4672
4673 /* If we are building a static constructor into a register,
4674 set the initial value as zero so we can fold the value into
dfbad8f1 4675 a constant. But if more than one register is involved,
4676 this probably loses. */
8ad4c111 4677 else if (REG_P (target) && TREE_STATIC (exp)
dfbad8f1 4678 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
dbd14dc5 4679 {
2c269e73 4680 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
dbd14dc5 4681 cleared = 1;
4682 }
4683
4684 /* If the constructor has fewer fields than the structure
4685 or if we are initializing the structure to mostly zeros,
80ac742d 4686 clear the whole structure first. Don't do this if TARGET is a
20c377c2 4687 register whose mode size isn't equal to SIZE since clear_storage
4688 can't handle this case. */
5c0c8e54 4689 else if (size > 0
4690 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4691 || mostly_zeros_p (exp))
8ad4c111 4692 && (!REG_P (target)
e792f237 4693 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
2c269e73 4694 == size)))
dbd14dc5 4695 {
96d87f56 4696 rtx xtarget = target;
4697
4698 if (readonly_fields_p (type))
4699 {
4700 xtarget = copy_rtx (xtarget);
4701 RTX_UNCHANGING_P (xtarget) = 1;
4702 }
4703
e792f237 4704 clear_storage (xtarget, GEN_INT (size));
dbd14dc5 4705 cleared = 1;
4706 }
e792f237 4707
4708 if (! cleared)
4709 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
10f307d9 4710
4711 /* Store each element of the constructor into
4712 the corresponding field of TARGET. */
4713
4714 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4715 {
19cb6b50 4716 tree field = TREE_PURPOSE (elt);
e6860d27 4717 tree value = TREE_VALUE (elt);
19cb6b50 4718 enum machine_mode mode;
02e7a332 4719 HOST_WIDE_INT bitsize;
4720 HOST_WIDE_INT bitpos = 0;
02e7a332 4721 tree offset;
c869557a 4722 rtx to_rtx = target;
10f307d9 4723
52a618b4 4724 /* Just ignore missing fields.
4725 We cleared the whole structure, above,
4726 if any fields are missing. */
4727 if (field == 0)
4728 continue;
4729
4ee9c684 4730 if (cleared && initializer_zerop (value))
e7ef3ff2 4731 continue;
dbd14dc5 4732
02e7a332 4733 if (host_integerp (DECL_SIZE (field), 1))
4734 bitsize = tree_low_cst (DECL_SIZE (field), 1);
155b05dc 4735 else
4736 bitsize = -1;
4737
10f307d9 4738 mode = DECL_MODE (field);
4739 if (DECL_BIT_FIELD (field))
4740 mode = VOIDmode;
4741
02e7a332 4742 offset = DECL_FIELD_OFFSET (field);
4743 if (host_integerp (offset, 0)
4744 && host_integerp (bit_position (field), 0))
4745 {
4746 bitpos = int_bit_position (field);
4747 offset = 0;
4748 }
c869557a 4749 else
02e7a332 4750 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
fa56dc1d 4751
c869557a 4752 if (offset)
4753 {
4754 rtx offset_rtx;
4755
55f9d7dc 4756 offset
4757 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4758 make_tree (TREE_TYPE (exp),
4759 target));
10f307d9 4760
c869557a 4761 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
e16ceb8e 4762 if (!MEM_P (to_rtx))
c869557a 4763 abort ();
4764
33ef2f52 4765#ifdef POINTERS_EXTEND_UNSIGNED
479e4d5e 4766 if (GET_MODE (offset_rtx) != Pmode)
33402d67 4767 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4a836698 4768#else
4769 if (GET_MODE (offset_rtx) != ptr_mode)
4770 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
33ef2f52 4771#endif
33ef2f52 4772
fcdc122e 4773 to_rtx = offset_address (to_rtx, offset_rtx,
4774 highest_pow2_factor (offset));
c869557a 4775 }
7014838c 4776
0ab96142 4777 if (TREE_READONLY (field))
4ba58fd4 4778 {
e16ceb8e 4779 if (MEM_P (to_rtx))
46652181 4780 to_rtx = copy_rtx (to_rtx);
4781
4ba58fd4 4782 RTX_UNCHANGING_P (to_rtx) = 1;
4783 }
4784
e6860d27 4785#ifdef WORD_REGISTER_OPERATIONS
4786 /* If this initializes a field that is smaller than a word, at the
4787 start of a word, try to widen it to a full word.
4788 This special case allows us to output C++ member function
4789 initializations in a form that the optimizers can understand. */
8ad4c111 4790 if (REG_P (target)
e6860d27 4791 && bitsize < BITS_PER_WORD
4792 && bitpos % BITS_PER_WORD == 0
4793 && GET_MODE_CLASS (mode) == MODE_INT
4794 && TREE_CODE (value) == INTEGER_CST
3a6656ad 4795 && exp_size >= 0
4796 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
e6860d27 4797 {
4798 tree type = TREE_TYPE (value);
2c269e73 4799
e6860d27 4800 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4801 {
dc24ddbd 4802 type = lang_hooks.types.type_for_size
78a8ed03 4803 (BITS_PER_WORD, TYPE_UNSIGNED (type));
e6860d27 4804 value = convert (type, value);
4805 }
2c269e73 4806
e6860d27 4807 if (BYTES_BIG_ENDIAN)
4808 value
4809 = fold (build (LSHIFT_EXPR, type, value,
4810 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4811 bitsize = BITS_PER_WORD;
4812 mode = word_mode;
4813 }
4814#endif
5cc193e7 4815
e16ceb8e 4816 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5cc193e7 4817 && DECL_NONADDRESSABLE_P (field))
4818 {
4819 to_rtx = copy_rtx (to_rtx);
4820 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4821 }
4822
7014838c 4823 store_constructor_field (to_rtx, bitsize, bitpos, mode,
297d4daf 4824 value, type, cleared,
5cc193e7 4825 get_alias_set (TREE_TYPE (field)));
10f307d9 4826 }
4827 }
0ad236c2 4828 else if (TREE_CODE (type) == ARRAY_TYPE
4829 || TREE_CODE (type) == VECTOR_TYPE)
10f307d9 4830 {
19cb6b50 4831 tree elt;
4832 int i;
e7ef3ff2 4833 int need_to_clear;
49be8259 4834 tree domain;
2ef1e405 4835 tree elttype = TREE_TYPE (type);
0ad236c2 4836 int const_bounds_p;
97b330ca 4837 HOST_WIDE_INT minelt = 0;
4838 HOST_WIDE_INT maxelt = 0;
b8d2bcdd 4839 int icode = 0;
4840 rtx *vector = NULL;
4841 int elt_size = 0;
4842 unsigned n_elts = 0;
84554bf9 4843
49be8259 4844 if (TREE_CODE (type) == ARRAY_TYPE)
4845 domain = TYPE_DOMAIN (type);
4846 else
4847 /* Vectors do not have domains; look up the domain of
4848 the array embedded in the debug representation type.
4849 FIXME Would probably be more efficient to treat vectors
4850 separately from arrays. */
0ad236c2 4851 {
0ad236c2 4852 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4853 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
b8d2bcdd 4854 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4855 {
4856 enum machine_mode mode = GET_MODE (target);
4857
4858 icode = (int) vec_init_optab->handlers[mode].insn_code;
4859 if (icode != CODE_FOR_nothing)
4860 {
4861 unsigned int i;
4862
4863 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4864 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4865		  vector = alloca (n_elts * sizeof (rtx));
4866 for (i = 0; i < n_elts; i++)
4867 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4868 }
4869 }
0ad236c2 4870 }
4871
4872 const_bounds_p = (TYPE_MIN_VALUE (domain)
4873 && TYPE_MAX_VALUE (domain)
4874 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4875 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4876
84554bf9 4877 /* If we have constant bounds for the range of the type, get them. */
4878 if (const_bounds_p)
4879 {
4880 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4881 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4882 }
10f307d9 4883
e7ef3ff2 4884 /* If the constructor has fewer elements than the array,
3398e91d 4885	 clear the whole array first.  Similarly if this is a
e7ef3ff2 4886	 static constructor of a non-BLKmode object.  */
8ad4c111 4887 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
e7ef3ff2 4888 need_to_clear = 1;
4889 else
4890 {
4891 HOST_WIDE_INT count = 0, zero_count = 0;
84554bf9 4892 need_to_clear = ! const_bounds_p;
4893
e7ef3ff2 4894 /* This loop is a more accurate version of the loop in
4895 mostly_zeros_p (it handles RANGE_EXPR in an index).
4896 It is also needed to check for missing elements. */
4897 for (elt = CONSTRUCTOR_ELTS (exp);
84554bf9 4898 elt != NULL_TREE && ! need_to_clear;
a6b6a56f 4899 elt = TREE_CHAIN (elt))
e7ef3ff2 4900 {
4901 tree index = TREE_PURPOSE (elt);
4902 HOST_WIDE_INT this_node_count;
325d1c45 4903
e7ef3ff2 4904 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4905 {
4906 tree lo_index = TREE_OPERAND (index, 0);
4907 tree hi_index = TREE_OPERAND (index, 1);
a0c2c45b 4908
325d1c45 4909 if (! host_integerp (lo_index, 1)
4910 || ! host_integerp (hi_index, 1))
e7ef3ff2 4911 {
4912 need_to_clear = 1;
4913 break;
4914 }
325d1c45 4915
4916 this_node_count = (tree_low_cst (hi_index, 1)
4917 - tree_low_cst (lo_index, 1) + 1);
e7ef3ff2 4918 }
4919 else
4920 this_node_count = 1;
84554bf9 4921
e7ef3ff2 4922 count += this_node_count;
4923 if (mostly_zeros_p (TREE_VALUE (elt)))
4924 zero_count += this_node_count;
4925 }
84554bf9 4926
028c2cf2 4927 /* Clear the entire array first if there are any missing elements,
a92771b8 4928 or if the incidence of zero elements is >= 75%. */
84554bf9 4929 if (! need_to_clear
4930 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e7ef3ff2 4931 need_to_clear = 1;
4932 }
84554bf9 4933
b8d2bcdd 4934 if (need_to_clear && size > 0 && !vector)
dbd14dc5 4935 {
4936 if (! cleared)
cbbc8e6a 4937 {
4938 if (REG_P (target))
4939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4940 else
e792f237 4941 clear_storage (target, GEN_INT (size));
cbbc8e6a 4942 }
e792f237 4943 cleared = 1;
dbd14dc5 4944 }
c6e2ff20 4945 else if (REG_P (target))
e792f237 4946 /* Inform later passes that the old value is dead. */
4947 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
10f307d9 4948
4949 /* Store each element of the constructor into
4950 the corresponding element of TARGET, determined
4951 by counting the elements. */
4952 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4953 elt;
4954 elt = TREE_CHAIN (elt), i++)
4955 {
19cb6b50 4956 enum machine_mode mode;
325d1c45 4957 HOST_WIDE_INT bitsize;
4958 HOST_WIDE_INT bitpos;
10f307d9 4959 int unsignedp;
e7ef3ff2 4960 tree value = TREE_VALUE (elt);
c4492f76 4961 tree index = TREE_PURPOSE (elt);
4962 rtx xtarget = target;
10f307d9 4963
4ee9c684 4964 if (cleared && initializer_zerop (value))
e7ef3ff2 4965 continue;
dbd14dc5 4966
78a8ed03 4967 unsignedp = TYPE_UNSIGNED (elttype);
155b05dc 4968 mode = TYPE_MODE (elttype);
4969 if (mode == BLKmode)
325d1c45 4970 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4971 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4972 : -1);
155b05dc 4973 else
4974 bitsize = GET_MODE_BITSIZE (mode);
10f307d9 4975
e7ef3ff2 4976 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4977 {
4978 tree lo_index = TREE_OPERAND (index, 0);
4979 tree hi_index = TREE_OPERAND (index, 1);
03192665 4980 rtx index_r, pos_rtx;
997a08e0 4981 HOST_WIDE_INT lo, hi, count;
4982 tree position;
e7ef3ff2 4983
b8d2bcdd 4984 if (vector)
4985 abort ();
4986
a92771b8 4987 /* If the range is constant and "small", unroll the loop. */
84554bf9 4988 if (const_bounds_p
4989 && host_integerp (lo_index, 0)
325d1c45 4990 && host_integerp (hi_index, 0)
4991 && (lo = tree_low_cst (lo_index, 0),
4992 hi = tree_low_cst (hi_index, 0),
997a08e0 4993 count = hi - lo + 1,
e16ceb8e 4994 (!MEM_P (target)
997a08e0 4995 || count <= 2
325d1c45 4996 || (host_integerp (TYPE_SIZE (elttype), 1)
4997 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4998 <= 40 * 8)))))
e7ef3ff2 4999 {
997a08e0 5000 lo -= minelt; hi -= minelt;
5001 for (; lo <= hi; lo++)
e7ef3ff2 5002 {
325d1c45 5003 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5cc193e7 5004
e16ceb8e 5005 if (MEM_P (target)
5cc193e7 5006 && !MEM_KEEP_ALIAS_SET_P (target)
0ad236c2 5007 && TREE_CODE (type) == ARRAY_TYPE
5cc193e7 5008 && TYPE_NONALIASED_COMPONENT (type))
5009 {
5010 target = copy_rtx (target);
5011 MEM_KEEP_ALIAS_SET_P (target) = 1;
5012 }
5013
1179a68b 5014 store_constructor_field
2c269e73 5015 (target, bitsize, bitpos, mode, value, type, cleared,
5016 get_alias_set (elttype));
e7ef3ff2 5017 }
5018 }
5019 else
5020 {
03192665 5021 rtx loop_start = gen_label_rtx ();
5022 rtx loop_end = gen_label_rtx ();
5023 tree exit_cond;
e7ef3ff2 5024
03192665 5025 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
78a8ed03 5026 unsignedp = TYPE_UNSIGNED (domain);
e7ef3ff2 5027
5028 index = build_decl (VAR_DECL, NULL_TREE, domain);
5029
0e8e37b2 5030 index_r
e7ef3ff2 5031 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5032 &unsignedp, 0));
0e8e37b2 5033 SET_DECL_RTL (index, index_r);
e7ef3ff2 5034 store_expr (lo_index, index_r, 0);
03192665 5035
5036 /* Build the head of the loop. */
5037 do_pending_stack_adjust ();
5038 emit_queue ();
5039 emit_label (loop_start);
e7ef3ff2 5040
a92771b8 5041 /* Assign value to element index. */
902de8ed 5042 position
5043 = convert (ssizetype,
5044 fold (build (MINUS_EXPR, TREE_TYPE (index),
5045 index, TYPE_MIN_VALUE (domain))));
5046 position = size_binop (MULT_EXPR, position,
5047 convert (ssizetype,
5048 TYPE_SIZE_UNIT (elttype)));
5049
e7ef3ff2 5050 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
fcdc122e 5051 xtarget = offset_address (target, pos_rtx,
5052 highest_pow2_factor (position));
5053 xtarget = adjust_address (xtarget, mode, 0);
e7ef3ff2 5054 if (TREE_CODE (value) == CONSTRUCTOR)
e792f237 5055 store_constructor (value, xtarget, cleared,
5056 bitsize / BITS_PER_UNIT);
e7ef3ff2 5057 else
5058 store_expr (value, xtarget, 0);
5059
03192665 5060 /* Generate a conditional jump to exit the loop. */
5061 exit_cond = build (LT_EXPR, integer_type_node,
5062 index, hi_index);
5063 jumpif (exit_cond, loop_end);
e7ef3ff2 5064
03192665 5065 /* Update the loop counter, and jump to the head of
5066 the loop. */
e7ef3ff2 5067 expand_increment (build (PREINCREMENT_EXPR,
5068 TREE_TYPE (index),
37e76d7d 5069 index, integer_one_node), 0, 0);
03192665 5070 emit_jump (loop_start);
5071
5072 /* Build the end of the loop. */
e7ef3ff2 5073 emit_label (loop_end);
e7ef3ff2 5074 }
5075 }
325d1c45 5076 else if ((index != 0 && ! host_integerp (index, 0))
5077 || ! host_integerp (TYPE_SIZE (elttype), 1))
c4492f76 5078 {
c4492f76 5079 tree position;
5080
b8d2bcdd 5081 if (vector)
5082 abort ();
5083
845a6957 5084 if (index == 0)
902de8ed 5085 index = ssize_int (1);
845a6957 5086
e7ef3ff2 5087 if (minelt)
902de8ed 5088 index = convert (ssizetype,
5089 fold (build (MINUS_EXPR, index,
5090 TYPE_MIN_VALUE (domain))));
325d1c45 5091
902de8ed 5092 position = size_binop (MULT_EXPR, index,
5093 convert (ssizetype,
5094 TYPE_SIZE_UNIT (elttype)));
fcdc122e 5095 xtarget = offset_address (target,
5096 expand_expr (position, 0, VOIDmode, 0),
5097 highest_pow2_factor (position));
5098 xtarget = adjust_address (xtarget, mode, 0);
e7ef3ff2 5099 store_expr (value, xtarget, 0);
c4492f76 5100 }
b8d2bcdd 5101 else if (vector)
5102 {
5103 int pos;
5104
5105 if (index != 0)
5106 pos = tree_low_cst (index, 0) - minelt;
5107 else
5108 pos = i;
5109 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5110 }
c4492f76 5111 else
5112 {
5113 if (index != 0)
325d1c45 5114 bitpos = ((tree_low_cst (index, 0) - minelt)
5115 * tree_low_cst (TYPE_SIZE (elttype), 1));
c4492f76 5116 else
325d1c45 5117 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5118
e16ceb8e 5119 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
0ad236c2 5120 && TREE_CODE (type) == ARRAY_TYPE
5cc193e7 5121 && TYPE_NONALIASED_COMPONENT (type))
5122 {
5123 target = copy_rtx (target);
5124 MEM_KEEP_ALIAS_SET_P (target) = 1;
5125 }
0a5c2b94 5126 store_constructor_field (target, bitsize, bitpos, mode, value,
5127 type, cleared, get_alias_set (elttype));
c4492f76 5128 }
10f307d9 5129 }
b8d2bcdd 5130 if (vector)
5131 {
5132 emit_insn (GEN_FCN (icode) (target,
5133 gen_rtx_PARALLEL (GET_MODE (target),
5134 gen_rtvec_v (n_elts, vector))));
5135 }
10f307d9 5136 }
325d1c45 5137
fa56dc1d 5138 /* Set constructor assignments. */
97b2af42 5139 else if (TREE_CODE (type) == SET_TYPE)
5140 {
e7ef3ff2 5141 tree elt = CONSTRUCTOR_ELTS (exp);
325d1c45 5142 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
97b2af42 5143 tree domain = TYPE_DOMAIN (type);
5144 tree domain_min, domain_max, bitlength;
5145
c3418f42 5146 /* The default implementation strategy is to extract the constant
97b2af42 5147 parts of the constructor, use that to initialize the target,
5148 and then "or" in whatever non-constant ranges we need in addition.
5149
5150 If a large set is all zero or all ones, it is
f896c932 5151 probably better to set it using memset.
97b2af42 5152	 Also, if a large set has just a single range, it may be
5153	 better to first clear the whole set (using memset), and then
f896c932 5154	 set the bits we want.  */
fa56dc1d 5155
a92771b8 5156 /* Check for all zeros. */
6c84d8f3 5157 if (elt == NULL_TREE && size > 0)
97b2af42 5158 {
e792f237 5159 if (!cleared)
5160 clear_storage (target, GEN_INT (size));
97b2af42 5161 return;
5162 }
5163
97b2af42 5164 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5165 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5166 bitlength = size_binop (PLUS_EXPR,
902de8ed 5167 size_diffop (domain_max, domain_min),
5168 ssize_int (1));
97b2af42 5169
325d1c45 5170 nbits = tree_low_cst (bitlength, 1);
e7ef3ff2 5171
5172 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5173 are "complicated" (more than one range), initialize (the
fa56dc1d 5174 constant parts) by copying from a constant. */
e7ef3ff2 5175 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5176 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
97b2af42 5177 {
325d1c45 5178 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b599eea5 5179 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
f0af5a88 5180 char *bit_buffer = alloca (nbits);
b599eea5 5181 HOST_WIDE_INT word = 0;
325d1c45 5182 unsigned int bit_pos = 0;
5183 unsigned int ibit = 0;
5184 unsigned int offset = 0; /* In bytes from beginning of set. */
5185
e7ef3ff2 5186 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b599eea5 5187 for (;;)
97b2af42 5188 {
b599eea5 5189 if (bit_buffer[ibit])
5190 {
117a9033 5191 if (BYTES_BIG_ENDIAN)
b599eea5 5192 word |= (1 << (set_word_size - 1 - bit_pos));
5193 else
5194 word |= 1 << bit_pos;
5195 }
325d1c45 5196
b599eea5 5197 bit_pos++; ibit++;
5198 if (bit_pos >= set_word_size || ibit == nbits)
97b2af42 5199 {
e792f237 5200 if (word != 0 || ! cleared)
e7ef3ff2 5201 {
8f46af08 5202 rtx datum = gen_int_mode (word, mode);
e7ef3ff2 5203 rtx to_rtx;
325d1c45 5204
a92771b8 5205 /* The assumption here is that it is safe to use
5206 XEXP if the set is multi-word, but not if
5207 it's single-word. */
e16ceb8e 5208 if (MEM_P (target))
e513d163 5209 to_rtx = adjust_address (target, mode, offset);
fa56dc1d 5210 else if (offset == 0)
e7ef3ff2 5211 to_rtx = target;
5212 else
5213 abort ();
5214 emit_move_insn (to_rtx, datum);
5215 }
325d1c45 5216
b599eea5 5217 if (ibit == nbits)
5218 break;
5219 word = 0;
5220 bit_pos = 0;
5221 offset += set_word_size / BITS_PER_UNIT;
97b2af42 5222 }
5223 }
97b2af42 5224 }
e792f237 5225 else if (!cleared)
325d1c45 5226 /* Don't bother clearing storage if the set is all ones. */
5227 if (TREE_CHAIN (elt) != NULL_TREE
5228 || (TREE_PURPOSE (elt) == NULL_TREE
5229 ? nbits != 1
5230 : ( ! host_integerp (TREE_VALUE (elt), 0)
5231 || ! host_integerp (TREE_PURPOSE (elt), 0)
5232 || (tree_low_cst (TREE_VALUE (elt), 0)
5233 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5234 != (HOST_WIDE_INT) nbits))))
e792f237 5235 clear_storage (target, expr_size (exp));
fa56dc1d 5236
e7ef3ff2 5237 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
97b2af42 5238 {
fa56dc1d 5239 /* Start of range of element or NULL. */
97b2af42 5240 tree startbit = TREE_PURPOSE (elt);
fa56dc1d 5241 /* End of range of element, or element value. */
97b2af42 5242 tree endbit = TREE_VALUE (elt);
5243 HOST_WIDE_INT startb, endb;
325d1c45 5244 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
97b2af42 5245
5246 bitlength_rtx = expand_expr (bitlength,
325d1c45 5247 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
97b2af42 5248
fa56dc1d 5249 /* Handle non-range tuple element like [ expr ]. */
97b2af42 5250 if (startbit == NULL_TREE)
5251 {
5252 startbit = save_expr (endbit);
5253 endbit = startbit;
5254 }
325d1c45 5255
97b2af42 5256 startbit = convert (sizetype, startbit);
5257 endbit = convert (sizetype, endbit);
5258 if (! integer_zerop (domain_min))
5259 {
5260 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5261 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5262 }
fa56dc1d 5263 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
97b2af42 5264 EXPAND_CONST_ADDRESS);
fa56dc1d 5265 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
97b2af42 5266 EXPAND_CONST_ADDRESS);
5267
5268 if (REG_P (target))
5269 {
387bc205 5270 targetx
5271 = assign_temp
dc24ddbd 5272 ((build_qualified_type (lang_hooks.types.type_for_mode
771d21fa 5273 (GET_MODE (target), 0),
387bc205 5274 TYPE_QUAL_CONST)),
5275 0, 1, 1);
97b2af42 5276 emit_move_insn (targetx, target);
5277 }
325d1c45 5278
e16ceb8e 5279 else if (MEM_P (target))
97b2af42 5280 targetx = target;
5281 else
5282 abort ();
5283
c0bfc78e 5284 /* Optimization: If startbit and endbit are constants divisible
5285 by BITS_PER_UNIT, call memset instead. */
f896c932 5286 if (TREE_CODE (startbit) == INTEGER_CST
97b2af42 5287 && TREE_CODE (endbit) == INTEGER_CST
5288 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e7ef3ff2 5289 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
97b2af42 5290 {
2c5d421b 5291 emit_library_call (memset_libfunc, LCT_NORMAL,
97b2af42 5292 VOIDmode, 3,
e7ef3ff2 5293 plus_constant (XEXP (targetx, 0),
5294 startb / BITS_PER_UNIT),
97b2af42 5295 Pmode,
36c8378b 5296 constm1_rtx, TYPE_MODE (integer_type_node),
97b2af42 5297 GEN_INT ((endb - startb) / BITS_PER_UNIT),
36c8378b 5298 TYPE_MODE (sizetype));
97b2af42 5299 }
5300 else
62f615b1 5301 emit_library_call (setbits_libfunc, LCT_NORMAL,
5302 VOIDmode, 4, XEXP (targetx, 0),
2c5d421b 5303 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
325d1c45 5304 startbit_rtx, TYPE_MODE (sizetype),
5305 endbit_rtx, TYPE_MODE (sizetype));
5306
97b2af42 5307 if (REG_P (target))
5308 emit_move_insn (target, targetx);
5309 }
5310 }
10f307d9 5311
5312 else
5313 abort ();
5314}
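
/* A minimal illustrative sketch, not part of the compiler: the SET_TYPE
   loop above packs constructor bits into host words one bit at a time,
   numbering bits from the most-significant end when BYTES_BIG_ENDIAN and
   from the least-significant end otherwise.  The helper below restates
   the same masking rule; EXAMPLE_WORD_BITS and the function name are
   hypothetical stand-ins.

   #define EXAMPLE_WORD_BITS 32

   static unsigned long
   example_set_bit (unsigned long word, unsigned int bit_pos, int big_endian)
   {
     /- Mirrors: word |= 1 << (set_word_size - 1 - bit_pos)  (big-endian)
        versus:  word |= 1 << bit_pos                (little-endian).
        Assumes bit_pos < EXAMPLE_WORD_BITS.  -/
     if (big_endian)
       return word | (1UL << (EXAMPLE_WORD_BITS - 1 - bit_pos));
     else
       return word | (1UL << bit_pos);
   }  */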
5315
5316/* Store the value of EXP (an expression tree)
5317 into a subfield of TARGET which has mode MODE and occupies
5318 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5319 If MODE is VOIDmode, it means that we are storing into a bit-field.
5320
5321 If VALUE_MODE is VOIDmode, return nothing in particular.
5322 UNSIGNEDP is not used in this case.
5323
5324 Otherwise, return an rtx for the value stored. This rtx
5325 has mode VALUE_MODE if that is convenient to do.
5326 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5327
2b96c5f6 5328	 TYPE is the type of the underlying object.
1e2513d9 5329
5330 ALIAS_SET is the alias set for the destination. This value will
5331 (in general) be different from that for TARGET, since TARGET is a
5332 reference to the containing structure. */
10f307d9 5333
5334static rtx
35cb5232 5335store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5336 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5337 int unsignedp, tree type, int alias_set)
10f307d9 5338{
b572011e 5339 HOST_WIDE_INT width_mask = 0;
10f307d9 5340
0dbd1c74 5341 if (TREE_CODE (exp) == ERROR_MARK)
5342 return const0_rtx;
5343
55e9836d 5344 /* If we have nothing to store, do nothing unless the expression has
5345 side-effects. */
5346 if (bitsize == 0)
5347 return expand_expr (exp, const0_rtx, VOIDmode, 0);
ea0041f4 5348 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
b572011e 5349 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
10f307d9 5350
5351 /* If we are storing into an unaligned field of an aligned union that is
5352 in a register, we may have the mode of TARGET being an integer mode but
5353 MODE == BLKmode. In that case, get an aligned object whose size and
5354 alignment are the same as TARGET and store TARGET into it (we can avoid
5355 the store if the field being stored is the entire width of TARGET). Then
5356 call ourselves recursively to store the field into a BLKmode version of
5357 that object. Finally, load from the object into TARGET. This is not
5358 very efficient in general, but should only be slightly more expensive
5359 than the otherwise-required unaligned accesses. Perhaps this can be
d2e73365 5360 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5361 twice, once with emit_move_insn and once via store_field. */
10f307d9 5362
5363 if (mode == BLKmode
8ad4c111 5364 && (REG_P (target) || GET_CODE (target) == SUBREG))
10f307d9 5365 {
d2e73365 5366 rtx object = assign_temp (type, 0, 1, 1);
7a827396 5367 rtx blk_object = adjust_address (object, BLKmode, 0);
10f307d9 5368
e1439bcb 5369 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
10f307d9 5370 emit_move_insn (object, target);
5371
2b96c5f6 5372 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5373 alias_set);
10f307d9 5374
5375 emit_move_insn (target, object);
5376
2b96c5f6 5377 /* We want to return the BLKmode version of the data. */
559a13ea 5378 return blk_object;
10f307d9 5379 }
efd3939c 5380
5381 if (GET_CODE (target) == CONCAT)
5382 {
5383 /* We're storing into a struct containing a single __complex. */
5384
5385 if (bitpos != 0)
5386 abort ();
4ee9c684 5387 return store_expr (exp, target, value_mode != VOIDmode);
efd3939c 5388 }
10f307d9 5389
5390 /* If the structure is in a register or if the component
5391 is a bit field, we cannot use addressing to access it.
5392 Use bit-field techniques or SUBREG to store in it. */
5393
07edfa02 5394 if (mode == VOIDmode
03519f22 5395 || (mode != BLKmode && ! direct_store[(int) mode]
5396 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5397 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
8ad4c111 5398 || REG_P (target)
66aa258b 5399 || GET_CODE (target) == SUBREG
4e05e574 5400 /* If the field isn't aligned enough to store as an ordinary memref,
5401 store it as a bit field. */
9a0db358 5402 || (mode != BLKmode
8f6f6bc8 5403 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5404 || bitpos % GET_MODE_ALIGNMENT (mode))
5405 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
35cb5232 5406 || (bitpos % BITS_PER_UNIT != 0)))
155b05dc 5407 /* If the RHS and field are a constant size and the size of the
5408 RHS isn't the same size as the bitfield, we must use bitfield
5409 operations. */
a0c2c45b 5410 || (bitsize >= 0
5411 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5412 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
10f307d9 5413 {
b572011e 5414 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
97d7f645 5415
0aa5cbcc 5416 /* If BITSIZE is narrower than the size of the type of EXP
5417 we will be narrowing TEMP. Normally, what's wanted are the
5418 low-order bits. However, if EXP's type is a record and this is
5419	 a big-endian machine, we want the upper BITSIZE bits.  */
5420 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
cce8da2f 5421 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
0aa5cbcc 5422 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5423 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5424 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5425 - bitsize),
8a348c93 5426 NULL_RTX, 1);
0aa5cbcc 5427
97d7f645 5428 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5429 MODE. */
5430 if (mode != VOIDmode && mode != BLKmode
5431 && mode != TYPE_MODE (TREE_TYPE (exp)))
5432 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5433
0e20f9fb 5434 /* If the modes of TARGET and TEMP are both BLKmode, both
5435 must be in memory and BITPOS must be aligned on a byte
5436 boundary. If so, we simply do a block copy. */
5437 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5438 {
e16ceb8e 5439 if (!MEM_P (target) || !MEM_P (temp)
0e20f9fb 5440 || bitpos % BITS_PER_UNIT != 0)
5441 abort ();
5442
e513d163 5443 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
0e20f9fb 5444 emit_block_move (target, temp,
2b96c5f6 5445 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
0378dbdc 5446 / BITS_PER_UNIT),
5447 BLOCK_OP_NORMAL);
0e20f9fb 5448
5449 return value_mode == VOIDmode ? const0_rtx : target;
5450 }
5451
10f307d9 5452 /* Store the value in the bitfield. */
2b96c5f6 5453 store_bit_field (target, bitsize, bitpos, mode, temp,
5454 int_size_in_bytes (type));
5455
10f307d9 5456 if (value_mode != VOIDmode)
5457 {
2c269e73 5458 /* The caller wants an rtx for the value.
5459 If possible, avoid refetching from the bitfield itself. */
10f307d9 5460 if (width_mask != 0
e16ceb8e 5461 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
ba281428 5462 {
7e716022 5463 tree count;
ba281428 5464 enum machine_mode tmode;
04e2d822 5465
ba281428 5466 tmode = GET_MODE (temp);
04e2d822 5467 if (tmode == VOIDmode)
5468 tmode = value_mode;
6de9716c 5469
5470 if (unsignedp)
5471 return expand_and (tmode, temp,
2d232d05 5472 gen_int_mode (width_mask, tmode),
6de9716c 5473 NULL_RTX);
5474
ba281428 5475 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5476 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5477 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5478 }
2c269e73 5479
10f307d9 5480 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2c269e73 5481 NULL_RTX, value_mode, VOIDmode,
2b96c5f6 5482 int_size_in_bytes (type));
10f307d9 5483 }
5484 return const0_rtx;
5485 }
5486 else
5487 {
5488 rtx addr = XEXP (target, 0);
2b96c5f6 5489 rtx to_rtx = target;
10f307d9 5490
5491 /* If a value is wanted, it must be the lhs;
5492	 so make the address stable for multiple uses.  */
5493
8ad4c111 5494 if (value_mode != VOIDmode && !REG_P (addr)
10f307d9 5495 && ! CONSTANT_ADDRESS_P (addr)
5496 /* A frame-pointer reference is already stable. */
5497 && ! (GET_CODE (addr) == PLUS
5498 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5499 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5500 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2b96c5f6 5501 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
10f307d9 5502
5503 /* Now build a reference to just the desired component. */
5504
2b96c5f6 5505 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5506
5507 if (to_rtx == target)
5508 to_rtx = copy_rtx (to_rtx);
537ffcfc 5509
6a0934dd 5510 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5cc193e7 5511 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
2b96c5f6 5512 set_mem_alias_set (to_rtx, alias_set);
10f307d9 5513
5514 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5515 }
5516}
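
/* A minimal illustrative sketch, not part of the compiler: when
   store_field must hand back the value it just stored without refetching
   the bit-field, it masks with width_mask in the unsigned case and uses a
   left/arithmetic-right shift pair in the signed case, as in the
   expand_and and expand_shift calls above.  The helper below repeats that
   on a host long; the name is hypothetical and it assumes
   0 < bitsize < the host word width.

   static long
   example_stored_value (long temp, int bitsize, int unsignedp)
   {
     int width = (int) (sizeof (long) * 8);

     if (unsignedp)
       /- Keep only the low BITSIZE bits, like TEMP & width_mask.  -/
       return temp & ((1L << bitsize) - 1);

     /- Move the field to the top of the word, then arithmetic-shift it
        back down so the result is sign-extended.  -/
     return (temp << (width - bitsize)) >> (width - bitsize);
   }  */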
5517\f
5518/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
ba04d9d5 5519 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5520 codes and find the ultimate containing object, which we return.
10f307d9 5521
5522 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5523 bit position, and *PUNSIGNEDP to the signedness of the field.
954bdcb1 5524 If the position of the field is variable, we store a tree
5525 giving the variable offset (in units) in *POFFSET.
5526 This offset is in addition to the bit position.
5527 If the position is not variable, we store 0 in *POFFSET.
10f307d9 5528
5529 If any of the extraction expressions is volatile,
5530 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5531
5532 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5533 is a mode that can be used to access the field. In that case, *PBITSIZE
01ab6370 5534 is redundant.
5535
5536 If the field describes a variable-sized object, *PMODE is set to
5537 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
1e625a2e 5538 this case, but the address of the object can be found. */
10f307d9 5539
5540tree
35cb5232 5541get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5542 HOST_WIDE_INT *pbitpos, tree *poffset,
5543 enum machine_mode *pmode, int *punsignedp,
5544 int *pvolatilep)
10f307d9 5545{
5546 tree size_tree = 0;
5547 enum machine_mode mode = VOIDmode;
902de8ed 5548 tree offset = size_zero_node;
02e7a332 5549 tree bit_offset = bitsize_zero_node;
02e7a332 5550 tree tem;
10f307d9 5551
02e7a332 5552 /* First get the mode, signedness, and size. We do this from just the
5553 outermost expression. */
10f307d9 5554 if (TREE_CODE (exp) == COMPONENT_REF)
5555 {
5556 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5557 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5558 mode = DECL_MODE (TREE_OPERAND (exp, 1));
02e7a332 5559
86ae60fd 5560 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
10f307d9 5561 }
5562 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5563 {
5564 size_tree = TREE_OPERAND (exp, 1);
86ae60fd 5565 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
10f307d9 5566 }
5567 else
5568 {
5569 mode = TYPE_MODE (TREE_TYPE (exp));
78a8ed03 5570 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
02e7a332 5571
be2828ce 5572 if (mode == BLKmode)
5573 size_tree = TYPE_SIZE (TREE_TYPE (exp));
02e7a332 5574 else
5575 *pbitsize = GET_MODE_BITSIZE (mode);
10f307d9 5576 }
fa56dc1d 5577
02e7a332 5578 if (size_tree != 0)
10f307d9 5579 {
02e7a332 5580 if (! host_integerp (size_tree, 1))
01ab6370 5581 mode = BLKmode, *pbitsize = -1;
5582 else
02e7a332 5583 *pbitsize = tree_low_cst (size_tree, 1);
10f307d9 5584 }
5585
5586 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5587 and find the ultimate containing object. */
10f307d9 5588 while (1)
5589 {
02e7a332 5590 if (TREE_CODE (exp) == BIT_FIELD_REF)
5591 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5592 else if (TREE_CODE (exp) == COMPONENT_REF)
10f307d9 5593 {
02e7a332 5594 tree field = TREE_OPERAND (exp, 1);
6374121b 5595 tree this_offset = component_ref_field_offset (exp);
10f307d9 5596
227bf826 5597 /* If this field hasn't been filled in yet, don't go
5598 past it. This should only happen when folding expressions
5599 made during type construction. */
02e7a332 5600 if (this_offset == 0)
227bf826 5601 break;
5602
7114c815 5603 offset = size_binop (PLUS_EXPR, offset, this_offset);
02e7a332 5604 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5605 DECL_FIELD_BIT_OFFSET (field));
75f7b24f 5606
2b96c5f6 5607 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
10f307d9 5608 }
7114c815 5609
ba04d9d5 5610 else if (TREE_CODE (exp) == ARRAY_REF
5611 || TREE_CODE (exp) == ARRAY_RANGE_REF)
10f307d9 5612 {
cf389750 5613 tree index = TREE_OPERAND (exp, 1);
6374121b 5614 tree low_bound = array_ref_low_bound (exp);
5615 tree unit_size = array_ref_element_size (exp);
cf389750 5616
02e7a332 5617 /* We assume all arrays have sizes that are a multiple of a byte.
5618 First subtract the lower bound, if any, in the type of the
5619 index, then convert to sizetype and multiply by the size of the
5620 array element. */
6374121b 5621 if (! integer_zerop (low_bound))
02e7a332 5622 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5623 index, low_bound));
30384dcf 5624
02e7a332 5625 offset = size_binop (PLUS_EXPR, offset,
5626 size_binop (MULT_EXPR,
5627 convert (sizetype, index),
7114c815 5628 unit_size));
10f307d9 5629 }
7114c815 5630
8a348c93 5631	 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5632 conversions that don't change the mode, and all view conversions
5633 except those that need to "step up" the alignment. */
10f307d9 5634 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
8a348c93 5635 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5636 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5637 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5638 && STRICT_ALIGNMENT
5639 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5640 < BIGGEST_ALIGNMENT)
5641 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5642 || TYPE_ALIGN_OK (TREE_TYPE
5643 (TREE_OPERAND (exp, 0))))))
10f307d9 5644 && ! ((TREE_CODE (exp) == NOP_EXPR
5645 || TREE_CODE (exp) == CONVERT_EXPR)
5646 && (TYPE_MODE (TREE_TYPE (exp))
5647 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5648 break;
954bdcb1 5649
5650 /* If any reference in the chain is volatile, the effect is volatile. */
5651 if (TREE_THIS_VOLATILE (exp))
5652 *pvolatilep = 1;
7fce34be 5653
10f307d9 5654 exp = TREE_OPERAND (exp, 0);
5655 }
5656
02e7a332 5657 /* If OFFSET is constant, see if we can return the whole thing as a
5658 constant bit position. Otherwise, split it up. */
5659 if (host_integerp (offset, 0)
5660 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5661 bitsize_unit_node))
5662 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5663 && host_integerp (tem, 0))
5664 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5665 else
5666 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
c869557a 5667
10f307d9 5668 *pmode = mode;
10f307d9 5669 return exp;
5670}
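
/* A minimal illustrative sketch, not part of the compiler: the loop above
   accumulates a byte offset (OFFSET, in units) and a bit offset
   (BIT_OFFSET), which collapse into one constant bit position when the
   byte part is constant.  Both helper names below are hypothetical.

   static long
   example_array_byte_offset (long index, long low_bound, long unit_size)
   {
     /- Mirrors the ARRAY_REF case: subtract the lower bound first, then
        scale by the element size in bytes.  -/
     return (index - low_bound) * unit_size;
   }

   static long
   example_bit_position (long byte_offset, long bit_offset, int bits_per_unit)
   {
     /- Mirrors the final MULT_EXPR-by-bitsize_unit_node plus BIT_OFFSET
        step at the end of get_inner_reference.  -/
     return byte_offset * bits_per_unit + bit_offset;
   }  */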
eb4b06b6 5671
6374121b 5672/* Return a tree of sizetype representing the size, in bytes, of the element
5673 of EXP, an ARRAY_REF. */
5674
5675tree
5676array_ref_element_size (tree exp)
5677{
5678 tree aligned_size = TREE_OPERAND (exp, 3);
5679 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5680
5681 /* If a size was specified in the ARRAY_REF, it's the size measured
5682 in alignment units of the element type. So multiply by that value. */
5683 if (aligned_size)
5684 return size_binop (MULT_EXPR, aligned_size,
5685 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5686
5687 /* Otherwise, take the size from that of the element type. Substitute
5688 any PLACEHOLDER_EXPR that we have. */
5689 else
5690 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5691}
5692
5693/* Return a tree representing the lower bound of the array mentioned in
5694 EXP, an ARRAY_REF. */
5695
5696tree
5697array_ref_low_bound (tree exp)
5698{
5699 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5700
5701 /* If a lower bound is specified in EXP, use it. */
5702 if (TREE_OPERAND (exp, 2))
5703 return TREE_OPERAND (exp, 2);
5704
5705 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5706 substituting for a PLACEHOLDER_EXPR as needed. */
5707 if (domain_type && TYPE_MIN_VALUE (domain_type))
5708 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5709
5710 /* Otherwise, return a zero of the appropriate type. */
5711 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5712}
5713
5714/* Return a tree representing the offset, in bytes, of the field referenced
5715 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5716
5717tree
5718component_ref_field_offset (tree exp)
5719{
5720 tree aligned_offset = TREE_OPERAND (exp, 2);
5721 tree field = TREE_OPERAND (exp, 1);
5722
5723 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5724 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5725 value. */
5726 if (aligned_offset)
5727 return size_binop (MULT_EXPR, aligned_offset,
5728 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5729
5730 /* Otherwise, take the offset from that of the field. Substitute
5731 any PLACEHOLDER_EXPR that we have. */
5732 else
5733 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5734}
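
/* A minimal illustrative sketch, not part of the compiler: both helpers
   above keep sizes and offsets measured in "alignment units" of the
   relevant type and convert them to bytes by multiplying with
   ALIGN / BITS_PER_UNIT.  The name below is hypothetical.

   static unsigned long
   example_units_to_bytes (unsigned long value_in_units,
			   unsigned int align_in_bits,
			   unsigned int bits_per_unit)
   {
     /- E.g. an offset of 3 units with 32-bit alignment and 8-bit units
	is 3 * (32 / 8) = 12 bytes.  -/
     return value_in_units * (align_in_bits / bits_per_unit);
   }  */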
5735
f96c43fb 5736/* Return 1 if T is an expression that get_inner_reference handles. */
5737
5738int
35cb5232 5739handled_component_p (tree t)
f96c43fb 5740{
5741 switch (TREE_CODE (t))
5742 {
5743 case BIT_FIELD_REF:
5744 case COMPONENT_REF:
5745 case ARRAY_REF:
5746 case ARRAY_RANGE_REF:
5747 case NON_LVALUE_EXPR:
5748 case VIEW_CONVERT_EXPR:
5749 return 1;
5750
74878f86 5751 /* ??? Sure they are handled, but get_inner_reference may return
5752 a different PBITSIZE, depending upon whether the expression is
5753 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
f96c43fb 5754 case NOP_EXPR:
5755 case CONVERT_EXPR:
5756 return (TYPE_MODE (TREE_TYPE (t))
5757 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5758
5759 default:
5760 return 0;
5761 }
5762}
10f307d9 5763\f
dc183975 5764/* Given an rtx VALUE that may contain additions and multiplications, return
5765 an equivalent value that just refers to a register, memory, or constant.
5766 This is done by generating instructions to perform the arithmetic and
5767 returning a pseudo-register containing the value.
c4f1a887 5768
5769 The returned value may be a REG, SUBREG, MEM or constant. */
10f307d9 5770
5771rtx
35cb5232 5772force_operand (rtx value, rtx target)
10f307d9 5773{
fef8467d 5774 rtx op1, op2;
10f307d9 5775 /* Use subtarget as the target for operand 0 of a binary operation. */
19cb6b50 5776 rtx subtarget = get_subtarget (target);
fef8467d 5777 enum rtx_code code = GET_CODE (value);
10f307d9 5778
f9cce2dc 5779	 /* Check for subreg applied to an expression produced by the loop optimizer.  */
5780 if (code == SUBREG
8ad4c111 5781 && !REG_P (SUBREG_REG (value))
e16ceb8e 5782 && !MEM_P (SUBREG_REG (value)))
f9cce2dc 5783 {
5784 value = simplify_gen_subreg (GET_MODE (value),
5785 force_reg (GET_MODE (SUBREG_REG (value)),
5786 force_operand (SUBREG_REG (value),
5787 NULL_RTX)),
5788 GET_MODE (SUBREG_REG (value)),
5789 SUBREG_BYTE (value));
5790 code = GET_CODE (value);
5791 }
5792
8b59469a 5793 /* Check for a PIC address load. */
fef8467d 5794 if ((code == PLUS || code == MINUS)
8b59469a 5795 && XEXP (value, 0) == pic_offset_table_rtx
5796 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5797 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5798 || GET_CODE (XEXP (value, 1)) == CONST))
5799 {
5800 if (!subtarget)
5801 subtarget = gen_reg_rtx (GET_MODE (value));
5802 emit_move_insn (subtarget, value);
5803 return subtarget;
5804 }
5805
fef8467d 5806 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
10f307d9 5807 {
fef8467d 5808 if (!target)
5809 target = gen_reg_rtx (GET_MODE (value));
ef8396bd 5810 convert_move (target, force_operand (XEXP (value, 0), NULL),
fef8467d 5811 code == ZERO_EXTEND);
5812 return target;
10f307d9 5813 }
5814
6720e96c 5815 if (ARITHMETIC_P (value))
10f307d9 5816 {
5817 op2 = XEXP (value, 1);
8ad4c111 5818 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
10f307d9 5819 subtarget = 0;
fef8467d 5820 if (code == MINUS && GET_CODE (op2) == CONST_INT)
10f307d9 5821 {
fef8467d 5822 code = PLUS;
10f307d9 5823 op2 = negate_rtx (GET_MODE (value), op2);
5824 }
5825
5826 /* Check for an addition with OP2 a constant integer and our first
fef8467d 5827 operand a PLUS of a virtual register and something else. In that
5828 case, we want to emit the sum of the virtual register and the
5829 constant first and then add the other value. This allows virtual
5830 register instantiation to simply modify the constant rather than
5831 creating another one around this addition. */
5832 if (code == PLUS && GET_CODE (op2) == CONST_INT
10f307d9 5833 && GET_CODE (XEXP (value, 0)) == PLUS
8ad4c111 5834 && REG_P (XEXP (XEXP (value, 0), 0))
10f307d9 5835 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5836 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5837 {
fef8467d 5838 rtx temp = expand_simple_binop (GET_MODE (value), code,
5839 XEXP (XEXP (value, 0), 0), op2,
5840 subtarget, 0, OPTAB_LIB_WIDEN);
5841 return expand_simple_binop (GET_MODE (value), code, temp,
5842 force_operand (XEXP (XEXP (value,
5843 0), 1), 0),
5844 target, 0, OPTAB_LIB_WIDEN);
10f307d9 5845 }
fa56dc1d 5846
fef8467d 5847 op1 = force_operand (XEXP (value, 0), subtarget);
5848 op2 = force_operand (op2, NULL_RTX);
5849 switch (code)
5850 {
5851 case MULT:
5852 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5853 case DIV:
5854 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5855 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5856 target, 1, OPTAB_LIB_WIDEN);
5857 else
5858 return expand_divmod (0,
5859 FLOAT_MODE_P (GET_MODE (value))
5860 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5861 GET_MODE (value), op1, op2, target, 0);
5862 break;
5863 case MOD:
5864 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5865 target, 0);
5866 break;
5867 case UDIV:
5868 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5869 target, 1);
5870 break;
5871 case UMOD:
5872 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 target, 1);
5874 break;
5875 case ASHIFTRT:
5876 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5877 target, 0, OPTAB_LIB_WIDEN);
5878 break;
5879 default:
5880 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5881 target, 1, OPTAB_LIB_WIDEN);
5882 }
5883 }
6720e96c 5884 if (UNARY_P (value))
fef8467d 5885 {
5886 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5887 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
10f307d9 5888 }
3084721c 5889
5890#ifdef INSN_SCHEDULING
5891	/* On machines that have insn scheduling, we want all memory references to be
5892 explicit, so we need to deal with such paradoxical SUBREGs. */
e16ceb8e 5893 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
3084721c 5894 && (GET_MODE_SIZE (GET_MODE (value))
5895 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5896 value
5897 = simplify_gen_subreg (GET_MODE (value),
5898 force_reg (GET_MODE (SUBREG_REG (value)),
5899 force_operand (SUBREG_REG (value),
5900 NULL_RTX)),
5901 GET_MODE (SUBREG_REG (value)),
5902 SUBREG_BYTE (value));
5903#endif
5904
10f307d9 5905 return value;
5906}
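
/* A minimal illustrative sketch, not part of the compiler: two of the
   rewrites force_operand performs above, restated on host integers.  It
   turns "x - C" into "x + (-C)" so one addition path handles both, and it
   folds a constant into a virtual-register PLUS first so that virtual
   register instantiation only has to adjust the constant.  All names are
   hypothetical.

   static long
   example_sub_as_add (long x, long c)
   {
     /- Mirrors: code = PLUS; op2 = negate_rtx (GET_MODE (value), op2);  -/
     return x + (-c);
   }

   static long
   example_fold_constant_first (long virtual_reg, long other, long c)
   {
     /- Mirrors the (virtual_reg + other) + C case: compute
	virtual_reg + C first, then add the remaining operand.  -/
     long temp = virtual_reg + c;
     return temp + other;
   }  */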
5907\f
10f307d9 5908/* Subroutine of expand_expr: return nonzero iff there is no way that
997d68fe 5909 EXP can reference X, which is being modified. TOP_P is nonzero if this
5910 call is going to be used to determine whether we need a temporary
67e40adc 5911 for EXP, as opposed to a recursive call to this function.
5912
5913 It is always safe for this routine to return zero since it merely
5914 searches for optimization opportunities. */
10f307d9 5915
e41f0d80 5916int
35cb5232 5917safe_from_p (rtx x, tree exp, int top_p)
10f307d9 5918{
5919 rtx exp_rtl = 0;
5920 int i, nops;
5921
a71ba0b1 5922 if (x == 0
5923 /* If EXP has varying size, we MUST use a target since we currently
62d8c952 5924 have no way of allocating temporaries of variable size
5925 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5926 So we assume here that something at a higher level has prevented a
b9438b95 5927 clash. This is somewhat bogus, but the best we can do. Only
997d68fe 5928 do this when X is BLKmode and when we are at the top level. */
4b72716d 5929 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
b9438b95 5930 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
62d8c952 5931 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5932 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5933 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5934 != INTEGER_CST)
387bc205 5935 && GET_MODE (x) == BLKmode)
5936 /* If X is in the outgoing argument area, it is always safe. */
e16ceb8e 5937 || (MEM_P (x)
387bc205 5938 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5939 || (GET_CODE (XEXP (x, 0)) == PLUS
5940 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
10f307d9 5941 return 1;
5942
5943 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5944 find the underlying pseudo. */
5945 if (GET_CODE (x) == SUBREG)
5946 {
5947 x = SUBREG_REG (x);
8ad4c111 5948 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
10f307d9 5949 return 0;
5950 }
5951
387bc205 5952 /* Now look at our tree code and possibly recurse. */
10f307d9 5953 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5954 {
5955 case 'd':
6db2b7ab 5956 exp_rtl = DECL_RTL_IF_SET (exp);
10f307d9 5957 break;
5958
5959 case 'c':
5960 return 1;
5961
5962 case 'x':
5963 if (TREE_CODE (exp) == TREE_LIST)
56c7ac50 5964 {
5965 while (1)
5966 {
5967 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5968 return 0;
5969 exp = TREE_CHAIN (exp);
5970 if (!exp)
5971 return 1;
5972 if (TREE_CODE (exp) != TREE_LIST)
5973 return safe_from_p (x, exp, 0);
5974 }
5975 }
67e40adc 5976 else if (TREE_CODE (exp) == ERROR_MARK)
5977 return 1; /* An already-visited SAVE_EXPR? */
10f307d9 5978 else
5979 return 0;
5980
7dd37241 5981 case 's':
5982 /* The only case we look at here is the DECL_INITIAL inside a
5983 DECL_EXPR. */
5984 return (TREE_CODE (exp) != DECL_EXPR
5985 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5986 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5987 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5988
10f307d9 5989 case '2':
5990 case '<':
56c7ac50 5991 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5992 return 0;
d632b59a 5993 /* Fall through. */
56c7ac50 5994
5995 case '1':
5996 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 5997
5998 case 'e':
5999 case 'r':
6000 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6001 the expression. If it is set, we conflict iff we are that rtx or
6002 both are in memory. Otherwise, we check all operands of the
6003 expression recursively. */
6004
6005 switch (TREE_CODE (exp))
6006 {
6007 case ADDR_EXPR:
86ce88aa 6008 /* If the operand is static or we are static, we can't conflict.
6009 Likewise if we don't conflict with the operand at all. */
6010 if (staticp (TREE_OPERAND (exp, 0))
6011 || TREE_STATIC (exp)
6012 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6013 return 1;
6014
6015 /* Otherwise, the only way this can conflict is if we are taking
6016	     the address of a DECL whose address is part of X, which is
6017 very rare. */
6018 exp = TREE_OPERAND (exp, 0);
6019 if (DECL_P (exp))
6020 {
6021 if (!DECL_RTL_SET_P (exp)
e16ceb8e 6022 || !MEM_P (DECL_RTL (exp)))
86ce88aa 6023 return 0;
6024 else
6025 exp_rtl = XEXP (DECL_RTL (exp), 0);
6026 }
6027 break;
10f307d9 6028
6029 case INDIRECT_REF:
e16ceb8e 6030 if (MEM_P (x)
387bc205 6031 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6032 get_alias_set (exp)))
10f307d9 6033 return 0;
6034 break;
6035
6036 case CALL_EXPR:
bc33ff05 6037 /* Assume that the call will clobber all hard registers and
6038 all of memory. */
8ad4c111 6039 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
e16ceb8e 6040 || MEM_P (x))
bc33ff05 6041 return 0;
10f307d9 6042 break;
6043
10f307d9 6044 case WITH_CLEANUP_EXPR:
5929001a 6045 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
10f307d9 6046 break;
6047
34e2ddcd 6048 case CLEANUP_POINT_EXPR:
10f307d9 6049 case SAVE_EXPR:
67c155cb 6050 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
10f307d9 6051
4e0ff571 6052 case BIND_EXPR:
6053 /* The only operand we look at is operand 1. The rest aren't
6054 part of the expression. */
997d68fe 6055 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4e0ff571 6056
0dbd1c74 6057 default:
6058 break;
10f307d9 6059 }
6060
6061 /* If we have an rtx, we do not need to scan our operands. */
6062 if (exp_rtl)
6063 break;
6064
e41f0d80 6065 nops = first_rtl_op (TREE_CODE (exp));
10f307d9 6066 for (i = 0; i < nops; i++)
6067 if (TREE_OPERAND (exp, i) != 0
997d68fe 6068 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
10f307d9 6069 return 0;
e41f0d80 6070
6071 /* If this is a language-specific tree code, it may require
6072 special handling. */
0fd4500a 6073 if ((unsigned int) TREE_CODE (exp)
6074 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
dc24ddbd 6075 && !lang_hooks.safe_from_p (x, exp))
e41f0d80 6076 return 0;
10f307d9 6077 }
6078
6079 /* If we have an rtl, find any enclosed object. Then see if we conflict
6080 with it. */
6081 if (exp_rtl)
6082 {
6083 if (GET_CODE (exp_rtl) == SUBREG)
6084 {
6085 exp_rtl = SUBREG_REG (exp_rtl);
8ad4c111 6086 if (REG_P (exp_rtl)
10f307d9 6087 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6088 return 0;
6089 }
6090
6091 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
387bc205 6092 are memory and they conflict. */
10f307d9 6093 return ! (rtx_equal_p (x, exp_rtl)
e16ceb8e 6094 || (MEM_P (x) && MEM_P (exp_rtl)
c5e81ca5 6095 && true_dependence (exp_rtl, VOIDmode, x,
387bc205 6096 rtx_addr_varies_p)));
10f307d9 6097 }
6098
6099 /* If we reach here, it is safe. */
6100 return 1;
6101}
6102
46e62598 6103/* Subroutine of expand_expr: return rtx if EXP is a
6104 variable or parameter; else return 0. */
6105
6106static rtx
35cb5232 6107var_rtx (tree exp)
46e62598 6108{
6109 STRIP_NOPS (exp);
6110 switch (TREE_CODE (exp))
6111 {
6112 case PARM_DECL:
6113 case VAR_DECL:
6114 return DECL_RTL (exp);
6115 default:
6116 return 0;
6117 }
6118}
155b05dc 6119\f
fcdc122e 6120/* Return the highest power of two that EXP is known to be a multiple of.
6121 This is used in updating alignment of MEMs in array references. */
6122
84130727 6123static unsigned HOST_WIDE_INT
35cb5232 6124highest_pow2_factor (tree exp)
fcdc122e 6125{
84130727 6126 unsigned HOST_WIDE_INT c0, c1;
fcdc122e 6127
6128 switch (TREE_CODE (exp))
6129 {
6130 case INTEGER_CST:
fe93cfe6 6131 /* We can find the lowest bit that's a one. If the low
6132 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6133 We need to handle this case since we can find it in a COND_EXPR,
91c82c20 6134 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
fe93cfe6 6135 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
e62299bd 6136 later ICE. */
fe93cfe6 6137 if (TREE_CONSTANT_OVERFLOW (exp))
a689a61a 6138 return BIGGEST_ALIGNMENT;
fe93cfe6 6139 else
fcdc122e 6140 {
fe93cfe6 6141 /* Note: tree_low_cst is intentionally not used here,
6142 we don't care about the upper bits. */
6143 c0 = TREE_INT_CST_LOW (exp);
6144 c0 &= -c0;
6145 return c0 ? c0 : BIGGEST_ALIGNMENT;
fcdc122e 6146 }
6147 break;
6148
cce8da2f 6149 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
fcdc122e 6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152 return MIN (c0, c1);
6153
6154 case MULT_EXPR:
6155 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6156 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6157 return c0 * c1;
6158
6159 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6160 case CEIL_DIV_EXPR:
cce8da2f 6161 if (integer_pow2p (TREE_OPERAND (exp, 1))
6162 && host_integerp (TREE_OPERAND (exp, 1), 1))
6163 {
6164 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6165 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6166 return MAX (1, c0 / c1);
6167 }
6168 break;
fcdc122e 6169
6170 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
55f9d7dc 6171 case SAVE_EXPR:
fcdc122e 6172 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6173
cce8da2f 6174 case COMPOUND_EXPR:
6175 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6176
fcdc122e 6177 case COND_EXPR:
6178 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6179 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6180 return MIN (c0, c1);
6181
6182 default:
6183 break;
6184 }
6185
6186 return 1;
6187}
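
/* A minimal illustrative sketch, not part of the compiler: the
   INTEGER_CST case above isolates the lowest set bit with "c0 &= -c0",
   which is exactly the largest power of two dividing the constant; the
   other cases combine factors with MIN (addition) or a product
   (multiplication).  The names below are hypothetical.

   static unsigned long
   example_pow2_factor (unsigned long c)
   {
     /- For c == 0 the real code returns BIGGEST_ALIGNMENT instead.  -/
     return c & -c;
   }

   static unsigned long
   example_pow2_factor_plus (unsigned long a, unsigned long b)
   {
     /- A sum is a multiple of a power of two only if both addends are,
	hence MIN, as in the PLUS_EXPR case.  -/
     unsigned long fa = example_pow2_factor (a);
     unsigned long fb = example_pow2_factor (b);
     return fa < fb ? fa : fb;
   }  */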
5b965633 6188
252d0e4d 6189/* Similar, except that the alignment requirements of TARGET are
6190 taken into account. Assume it is at least as aligned as its
6191 type, unless it is a COMPONENT_REF in which case the layout of
6192 the structure gives the alignment. */
5b965633 6193
84130727 6194static unsigned HOST_WIDE_INT
252d0e4d 6195highest_pow2_factor_for_target (tree target, tree exp)
5b965633 6196{
252d0e4d 6197 unsigned HOST_WIDE_INT target_align, factor;
5b965633 6198
6199 factor = highest_pow2_factor (exp);
252d0e4d 6200 if (TREE_CODE (target) == COMPONENT_REF)
6201 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6202 else
6203 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6204 return MAX (factor, target_align);
5b965633 6205}
fcdc122e 6206\f
4ee9c684 6207/* Expands variable VAR. */
6208
6209void
6210expand_var (tree var)
6211{
6212 if (DECL_EXTERNAL (var))
6213 return;
6214
6215 if (TREE_STATIC (var))
6216 /* If this is an inlined copy of a static local variable,
6217 look up the original decl. */
6218 var = DECL_ORIGIN (var);
6219
6220 if (TREE_STATIC (var)
6221 ? !TREE_ASM_WRITTEN (var)
6222 : !DECL_RTL_SET_P (var))
6223 {
6224 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6225 {
6226 /* Prepare a mem & address for the decl. */
6227 rtx x;
6228
6229 if (TREE_STATIC (var))
6230 abort ();
6231
6232 x = gen_rtx_MEM (DECL_MODE (var),
6233 gen_reg_rtx (Pmode));
6234
6235 set_mem_attributes (x, var, 1);
6236 SET_DECL_RTL (var, x);
6237 }
5135beeb 6238 else if (lang_hooks.expand_decl (var))
4ee9c684 6239 /* OK. */;
6240 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6241 expand_decl (var);
6242 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6243 rest_of_decl_compilation (var, NULL, 0, 0);
6244 else if (TREE_CODE (var) == TYPE_DECL
6245 || TREE_CODE (var) == CONST_DECL
6246 || TREE_CODE (var) == FUNCTION_DECL
6247 || TREE_CODE (var) == LABEL_DECL)
6248 /* No expansion needed. */;
6249 else
6250 abort ();
6251 }
6252}
6253
6254/* Expands declarations of variables in list VARS. */
6255
6256static void
6257expand_vars (tree vars)
6258{
6259 for (; vars; vars = TREE_CHAIN (vars))
6260 {
6261 tree var = vars;
6262
6263 if (DECL_EXTERNAL (var))
6264 continue;
6265
6266 expand_var (var);
6267 expand_decl_init (var);
6268 }
6269}
6270
33204670 6271/* Subroutine of expand_expr. Expand the two operands of a binary
6272 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6273 The value may be stored in TARGET if TARGET is nonzero. The
6274 MODIFIER argument is as documented by expand_expr. */
6275
6276static void
6277expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6278 enum expand_modifier modifier)
6279{
6280 if (! safe_from_p (target, exp1, 1))
6281 target = 0;
6282 if (operand_equal_p (exp0, exp1, 0))
6283 {
6284 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6285 *op1 = copy_rtx (*op0);
6286 }
6287 else
6288 {
3541e113 6289 /* If we need to preserve evaluation order, copy exp0 into its own
6290 temporary variable so that it can't be clobbered by exp1. */
6291 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6292 exp0 = save_expr (exp0);
33204670 6293 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6294 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6295 }
6296}
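
/* A minimal illustrative sketch, not part of the compiler: why
   expand_operands wraps EXP0 in save_expr when flag_evaluation_order is
   set and EXP1 has side effects.  If both operands touch the same state,
   the result depends on which one is expanded first; saving the first
   operand's value pins the language-defined order.  All names are
   hypothetical.

   static int example_counter;

   static int
   example_next (void)
   {
     return ++example_counter;
   }

   static int
   example_ordered_sum (void)
   {
     /- Evaluate the first operand once, up front, like save_expr; with
	example_counter starting at 0 this always returns 1 + 20.  -/
     int first = example_next ();
     int second = example_next () * 10;
     return first + second;
   }  */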
6297
c3a9c149 6298\f
10f307d9 6299/* expand_expr: generate code for computing expression EXP.
6300 An rtx for the computed value is returned. The value is never null.
6301 In the case of a void EXP, const0_rtx is returned.
6302
6303 The value may be stored in TARGET if TARGET is nonzero.
6304 TARGET is just a suggestion; callers must assume that
6305 the rtx returned may not be the same as TARGET.
6306
6307 If TARGET is CONST0_RTX, it means that the value will be ignored.
6308
6309 If TMODE is not VOIDmode, it suggests generating the
6310 result in mode TMODE. But this is done only when convenient.
6311	 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6312 TMODE is just a suggestion; callers must assume that
6313 the rtx returned may not have mode TMODE.
6314
d2ae1b1e 6315 Note that TARGET may have neither TMODE nor MODE. In that case, it
6316 probably will not be used.
10f307d9 6317
6318 If MODIFIER is EXPAND_SUM then when EXP is an addition
6319 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6320 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6321 products as above, or REG or MEM, or constant.
6322 Ordinarily in such cases we would output mul or add instructions
6323 and then return a pseudo reg containing the sum.
6324
6325 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6326 it also marks a label as absolutely required (it can't be dead).
1aaabd2e 6327 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d2ae1b1e 6328 This is used for outputting expressions used in initializers.
6329
6330 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6331 with a constant address even if that address is not normally legitimate.
a35a63ff 6332 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6333
6334 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6335 a call parameter. Such targets require special care as we haven't yet
6336 marked TARGET so that it's safe from being trashed by libcalls. We
6337 don't want to use TARGET for anything but the final result;
6338 Intermediate values must go elsewhere. Additionally, calls to
60ffaf4d 6339 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6340
6341 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6342 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6343 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6344 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6345 recursively. */
10f307d9 6346
4ee9c684 6347static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6348 enum expand_modifier, rtx *);
6349
10f307d9 6350rtx
60ffaf4d 6351expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6352 enum expand_modifier modifier, rtx *alt_rtl)
4ee9c684 6353{
6354 int rn = -1;
6355 rtx ret, last = NULL;
6356
6357 /* Handle ERROR_MARK before anybody tries to access its type. */
6358 if (TREE_CODE (exp) == ERROR_MARK
6359 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6360 {
6361 ret = CONST0_RTX (tmode);
6362 return ret ? ret : const0_rtx;
6363 }
6364
6365 if (flag_non_call_exceptions)
6366 {
6367 rn = lookup_stmt_eh_region (exp);
6368 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6369 if (rn >= 0)
6370 last = get_last_insn ();
6371 }
6372
6373 /* If this is an expression of some kind and it has an associated line
6374 number, then emit the line number before expanding the expression.
6375
6376 We need to save and restore the file and line information so that
6377 errors discovered during expansion are emitted with the right
6378	     information.  It would be better if the diagnostic routines
6379 used the file/line information embedded in the tree nodes rather
6380 than globals. */
6381 if (cfun && EXPR_HAS_LOCATION (exp))
6382 {
6383 location_t saved_location = input_location;
6384 input_location = EXPR_LOCATION (exp);
6385 emit_line_note (input_location);
6386
6387 /* Record where the insns produced belong. */
32a2193a 6388 record_block_change (TREE_BLOCK (exp));
4ee9c684 6389
6390 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6391
6392 input_location = saved_location;
6393 }
6394 else
6395 {
6396 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6397 }
6398
6399 /* If using non-call exceptions, mark all insns that may trap.
6400 expand_call() will mark CALL_INSNs before we get to this code,
6401 but it doesn't handle libcalls, and these may trap. */
6402 if (rn >= 0)
6403 {
6404 rtx insn;
6405 for (insn = next_real_insn (last); insn;
6406 insn = next_real_insn (insn))
6407 {
6408 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6409 /* If we want exceptions for non-call insns, any
6410 may_trap_p instruction may throw. */
6411 && GET_CODE (PATTERN (insn)) != CLOBBER
6412 && GET_CODE (PATTERN (insn)) != USE
6413 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6414 {
6415 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6416 REG_NOTES (insn));
6417 }
6418 }
6419 }
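  /* Illustration: an insn flagged by the loop above as potentially
     trapping in region RN carries a note of the form

	 (expr_list:REG_EH_REGION (const_int RN) ...)

     which the exception machinery consults when laying out the EH
     region tables.  */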
6420
6421 return ret;
6422}
6423
6424static rtx
6425expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6426 enum expand_modifier modifier, rtx *alt_rtl)
10f307d9 6427{
19cb6b50 6428 rtx op0, op1, temp;
10f307d9 6429 tree type = TREE_TYPE (exp);
78a8ed03 6430 int unsignedp;
19cb6b50 6431 enum machine_mode mode;
6432 enum tree_code code = TREE_CODE (exp);
10f307d9 6433 optab this_optab;
32b3a273 6434 rtx subtarget, original_target;
6435 int ignore;
10f307d9 6436 tree context;
6437
32b3a273 6438 mode = TYPE_MODE (type);
78a8ed03 6439 unsignedp = TYPE_UNSIGNED (type);
6440
32b3a273 6441 /* Use subtarget as the target for operand 0 of a binary operation. */
d8e5b213 6442 subtarget = get_subtarget (target);
32b3a273 6443 original_target = target;
6444 ignore = (target == const0_rtx
6445 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6446 || code == CONVERT_EXPR || code == REFERENCE_EXPR
b3187c7c 6447 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
32b3a273 6448 && TREE_CODE (type) == VOID_TYPE));
6449
f75fb6ae 6450 /* If we are going to ignore this result, we need only do something
6451 if there is a side-effect somewhere in the expression. If there
c869557a 6452 is, short-circuit the most common cases here. Note that we must
6453 not call expand_expr with anything but const0_rtx in case this
6454 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
10f307d9 6455
f75fb6ae 6456 if (ignore)
6457 {
6458 if (! TREE_SIDE_EFFECTS (exp))
6459 return const0_rtx;
6460
155b05dc 6461 /* Ensure we reference a volatile object even if value is ignored, but
6462 don't do this if all we are doing is taking its address. */
f75fb6ae 6463 if (TREE_THIS_VOLATILE (exp)
6464 && TREE_CODE (exp) != FUNCTION_DECL
155b05dc 6465 && mode != VOIDmode && mode != BLKmode
6466 && modifier != EXPAND_CONST_ADDRESS)
f75fb6ae 6467 {
8a06f2d4 6468 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
e16ceb8e 6469 if (MEM_P (temp))
f75fb6ae 6470 temp = copy_to_reg (temp);
6471 return const0_rtx;
6472 }
6473
155b05dc 6474 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6475 || code == INDIRECT_REF || code == BUFFER_REF)
8a06f2d4 6476 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6477 modifier);
6478
155b05dc 6479 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
ba04d9d5 6480 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
f75fb6ae 6481 {
8a06f2d4 6482 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6483 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
f75fb6ae 6484 return const0_rtx;
6485 }
6486 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6487 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6488 /* If the second operand has no side effects, just evaluate
a92771b8 6489 the first. */
8a06f2d4 6490 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6491 modifier);
155b05dc 6492 else if (code == BIT_FIELD_REF)
6493 {
8a06f2d4 6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6495 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6496 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
155b05dc 6497 return const0_rtx;
6498 }
8a06f2d4 6499
6dae9dfa 6500 target = 0;
f75fb6ae 6501 }
10f307d9 6502
34f17b00 6503	  /* If we will do cse, generate all results into pseudo registers
6504 since 1) that allows cse to find more things
6505 and 2) otherwise cse could produce an insn the machine
805e22b2 6506 cannot support. An exception is a CONSTRUCTOR into a multi-word
6507 MEM: that's much more likely to be most efficient into the MEM.
6508 Another is a CALL_EXPR which must return in memory. */
34f17b00 6509
10f307d9 6510 if (! cse_not_expected && mode != BLKmode && target
8ad4c111 6511 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
805e22b2 6512 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
45550790 6513 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
a35a63ff 6514 target = 0;
10f307d9 6515
10f307d9 6516 switch (code)
6517 {
6518 case LABEL_DECL:
bb5fd95e 6519 {
6520 tree function = decl_function_context (exp);
7014838c 6521
4ee9c684 6522 temp = label_rtx (exp);
6523 temp = gen_rtx_LABEL_REF (Pmode, temp);
6524
f94a4a77 6525 if (function != current_function_decl
4ee9c684 6526 && function != 0)
6527 LABEL_REF_NONLOCAL_P (temp) = 1;
6528
6529 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
1aaabd2e 6530 return temp;
bb5fd95e 6531 }
10f307d9 6532
6533 case PARM_DECL:
71a455ac 6534 if (!DECL_RTL_SET_P (exp))
10f307d9 6535 {
9bc3739f 6536 error ("%Jprior parameter's size depends on '%D'", exp, exp);
2ef1e405 6537 return CONST0_RTX (mode);
10f307d9 6538 }
6539
a92771b8 6540 /* ... fall through ... */
d2ae1b1e 6541
10f307d9 6542 case VAR_DECL:
f8c3511b 6543 /* If a static var's type was incomplete when the decl was written,
6544 but the type is complete now, lay out the decl now. */
59e226b0 6545 if (DECL_SIZE (exp) == 0
6546 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
f8c3511b 6547 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
65433eb4 6548 layout_decl (exp, 0);
eb4b06b6 6549
a92771b8 6550 /* ... fall through ... */
d2ae1b1e 6551
f8c3511b 6552 case FUNCTION_DECL:
10f307d9 6553 case RESULT_DECL:
6554 if (DECL_RTL (exp) == 0)
6555 abort ();
d2ae1b1e 6556
34f17b00 6557	      /* Ensure the variable is marked as used even if it doesn't go through
6558	 a parser.  If it hasn't been used yet, write out an external
6559 definition. */
6560 if (! TREE_USED (exp))
6561 {
6562 assemble_external (exp);
6563 TREE_USED (exp) = 1;
6564 }
6565
6e6b4174 6566 /* Show we haven't gotten RTL for this yet. */
6567 temp = 0;
6568
10f307d9 6569 /* Handle variables inherited from containing functions. */
6570 context = decl_function_context (exp);
6571
10f307d9 6572 if (context != 0 && context != current_function_decl
10f307d9 6573 /* If var is static, we don't need a static chain to access it. */
e16ceb8e 6574 && ! (MEM_P (DECL_RTL (exp))
10f307d9 6575 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6576 {
6577 rtx addr;
6578
6579 /* Mark as non-local and addressable. */
bc417a8f 6580 DECL_NONLOCAL (exp) = 1;
8fd50fe1 6581 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6582 abort ();
dc24ddbd 6583 lang_hooks.mark_addressable (exp);
e16ceb8e 6584 if (!MEM_P (DECL_RTL (exp)))
10f307d9 6585 abort ();
6586 addr = XEXP (DECL_RTL (exp), 0);
e16ceb8e 6587 if (MEM_P (addr))
537ffcfc 6588 addr
6589 = replace_equiv_address (addr,
6590 fix_lexical_addr (XEXP (addr, 0), exp));
10f307d9 6591 else
6592 addr = fix_lexical_addr (addr, exp);
f7c44134 6593
537ffcfc 6594 temp = replace_equiv_address (DECL_RTL (exp), addr);
10f307d9 6595 }
2ef1e405 6596
10f307d9 6597 /* This is the case of an array whose size is to be determined
6598 from its initializer, while the initializer is still being parsed.
6599 See expand_decl. */
d2ae1b1e 6600
e16ceb8e 6601 else if (MEM_P (DECL_RTL (exp))
8ad4c111 6602 && REG_P (XEXP (DECL_RTL (exp), 0)))
537ffcfc 6603 temp = validize_mem (DECL_RTL (exp));
d2ae1b1e 6604
6605	      /* If DECL_RTL is memory, we are in the normal case; if either
6606	 the address is not valid, or it is not a register and -fforce-addr
6607	 is specified, get the address into a register.  */
6608
e16ceb8e 6609 else if (MEM_P (DECL_RTL (exp))
6e6b4174 6610 && modifier != EXPAND_CONST_ADDRESS
6611 && modifier != EXPAND_SUM
6612 && modifier != EXPAND_INITIALIZER
6613 && (! memory_address_p (DECL_MODE (exp),
6614 XEXP (DECL_RTL (exp), 0))
6615 || (flag_force_addr
8ad4c111 6616 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
60ffaf4d 6617 {
6618 if (alt_rtl)
6619 *alt_rtl = DECL_RTL (exp);
6620 temp = replace_equiv_address (DECL_RTL (exp),
6621 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6622 }
acfb31e5 6623
6e6b4174 6624 /* If we got something, return it. But first, set the alignment
5ac439f3 6625 if the address is a register. */
6e6b4174 6626 if (temp != 0)
6627 {
e16ceb8e 6628 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
80909c64 6629 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6e6b4174 6630
6631 return temp;
6632 }
6633
acfb31e5 6634 /* If the mode of DECL_RTL does not match that of the decl, it
6635 must be a promoted value. We return a SUBREG of the wanted mode,
6636 but mark it so that we know that it was already extended. */
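    /* A hypothetical instance: on a target that promotes QImode locals
       to SImode registers, a char variable lives in (reg:SI N), and
       this code returns (subreg:QI (reg:SI N) 0) (little-endian lowpart
       shown) with SUBREG_PROMOTED_VAR_P set, so later expansions can
       skip the redundant extension.  */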
6637
8ad4c111 6638 if (REG_P (DECL_RTL (exp))
ac85e396 6639 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
acfb31e5 6640 {
acfb31e5 6641 /* Get the signedness used for this variable. Ensure we get the
6642 same mode we got when the variable was declared. */
54fa89c6 6643 if (GET_MODE (DECL_RTL (exp))
ff385626 6644 != promote_mode (type, DECL_MODE (exp), &unsignedp,
d20c8e15 6645 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
acfb31e5 6646 abort ();
6647
701e46d0 6648 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
acfb31e5 6649 SUBREG_PROMOTED_VAR_P (temp) = 1;
bfd242e8 6650 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
acfb31e5 6651 return temp;
6652 }
6653
10f307d9 6654 return DECL_RTL (exp);
6655
6656 case INTEGER_CST:
9998b6a9 6657 temp = immed_double_const (TREE_INT_CST_LOW (exp),
a0c2c45b 6658 TREE_INT_CST_HIGH (exp), mode);
10f307d9 6659
9998b6a9 6660 /* ??? If overflow is set, fold will have done an incomplete job,
6661 which can result in (plus xx (const_int 0)), which can get
6662 simplified by validate_replace_rtx during virtual register
6663 instantiation, which can result in unrecognizable insns.
6664 Avoid this by forcing all overflows into registers. */
f64482cc 6665 if (TREE_CONSTANT_OVERFLOW (exp)
6666 && modifier != EXPAND_INITIALIZER)
9998b6a9 6667 temp = force_reg (mode, temp);
6668
6669 return temp;
6670
c3309fc6 6671 case VECTOR_CST:
6672 return const_vector_from_tree (exp);
6673
10f307d9 6674 case CONST_DECL:
a35a63ff 6675 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10f307d9 6676
6677 case REAL_CST:
6678 /* If optimized, generate immediate CONST_DOUBLE
fa56dc1d 6679 which will be turned into memory by reload if necessary.
6680
10f307d9 6681 We used to force a register so that loop.c could see it. But
6682 this does not allow gen_* patterns to perform optimizations with
6683 the constants. It also produces two insns in cases like "x = 1.0;".
6684 On most machines, floating-point constants are not permitted in
6685 many insns, so we'd end up copying it to a register in any case.
6686
6687 Now, we do the copying in expand_binop, if appropriate. */
2ff23ed0 6688 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6689 TYPE_MODE (TREE_TYPE (exp)));
10f307d9 6690
6691 case COMPLEX_CST:
67cae17d 6692 /* Handle evaluating a complex constant in a CONCAT target. */
6693 if (original_target && GET_CODE (original_target) == CONCAT)
6694 {
6695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6696 rtx rtarg, itarg;
6697
6698 rtarg = XEXP (original_target, 0);
6699 itarg = XEXP (original_target, 1);
6700
6701 /* Move the real and imaginary parts separately. */
6702 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6703 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6704
6705 if (op0 != rtarg)
6706 emit_move_insn (rtarg, op0);
6707 if (op1 != itarg)
6708 emit_move_insn (itarg, op1);
6709
6710 return original_target;
6711 }
6712
6473f3f4 6713 /* ... fall through ... */
67cae17d 6714
10f307d9 6715 case STRING_CST:
63c8163c 6716 temp = output_constant_def (exp, 1);
10f307d9 6717
63c8163c 6718 /* temp contains a constant address.
10f307d9 6719 On RISC machines where a constant address isn't valid,
6720 make some insns to get that address into a register. */
63c8163c 6721 if (modifier != EXPAND_CONST_ADDRESS
10f307d9 6722 && modifier != EXPAND_INITIALIZER
6723 && modifier != EXPAND_SUM
63c8163c 6724 && (! memory_address_p (mode, XEXP (temp, 0))
6725 || flag_force_addr))
6726 return replace_equiv_address (temp,
6727 copy_rtx (XEXP (temp, 0)));
6728 return temp;
10f307d9 6729
6730 case SAVE_EXPR:
67c155cb 6731 {
6732 tree val = TREE_OPERAND (exp, 0);
6733 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
997d68fe 6734
67c155cb 6735 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6736 {
6737 /* We can indeed still hit this case, typically via builtin
6738 expanders calling save_expr immediately before expanding
6739 something. Assume this means that we only have to deal
6740 with non-BLKmode values. */
6741 if (GET_MODE (ret) == BLKmode)
6742 abort ();
acfb31e5 6743
67c155cb 6744 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6745 DECL_ARTIFICIAL (val) = 1;
6746 TREE_OPERAND (exp, 0) = val;
acfb31e5 6747
67c155cb 6748 if (!CONSTANT_P (ret))
6749 ret = copy_to_reg (ret);
6750 SET_DECL_RTL (val, ret);
6751 }
acfb31e5 6752
67c155cb 6753 return ret;
6754 }
10f307d9 6755
0e676ec9 6756 case UNSAVE_EXPR:
6757 {
6758 rtx temp;
6759 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
1d347c23 6760 TREE_OPERAND (exp, 0)
dc24ddbd 6761 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
0e676ec9 6762 return temp;
6763 }
6764
c19f64ba 6765 case GOTO_EXPR:
6766 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6767 expand_goto (TREE_OPERAND (exp, 0));
6768 else
6769 expand_computed_goto (TREE_OPERAND (exp, 0));
6770 return const0_rtx;
6771
03192665 6772 /* These are lowered during gimplification, so we should never ever
6773 see them here. */
6774 case LOOP_EXPR:
10f307d9 6775 case EXIT_EXPR:
03192665 6776 abort ();
10f307d9 6777
d0b30cc7 6778 case LABELED_BLOCK_EXPR:
6779 if (LABELED_BLOCK_BODY (exp))
735f4358 6780 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6312a35e 6781 /* Should perhaps use expand_label, but this is simpler and safer. */
18df45ce 6782 do_pending_stack_adjust ();
d0b30cc7 6783 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6784 return const0_rtx;
6785
6786 case EXIT_BLOCK_EXPR:
6787 if (EXIT_BLOCK_RETURN (exp))
be2828ce 6788 sorry ("returned value in block_exit_expr");
d0b30cc7 6789 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6790 return const0_rtx;
6791
10f307d9 6792 case BIND_EXPR:
6793 {
4ee9c684 6794 tree block = BIND_EXPR_BLOCK (exp);
6795 int mark_ends;
10f307d9 6796
735f4358 6797 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6798 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6799 mark_ends = (block != NULL_TREE);
6800 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
10f307d9 6801
4ee9c684 6802 /* If VARS have not yet been expanded, expand them now. */
6803 expand_vars (BIND_EXPR_VARS (exp));
6804
6805 /* TARGET was clobbered early in this function. The correct
6806	   indicator of whether or not we need the value of this
6807 expression is the IGNORE variable. */
6808 temp = expand_expr (BIND_EXPR_BODY (exp),
6809 ignore ? const0_rtx : target,
6810 tmode, modifier);
10f307d9 6811
4ee9c684 6812 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
10f307d9 6813
6814 return temp;
6815 }
6816
10f307d9 6817 case CONSTRUCTOR:
f75fb6ae 6818 /* If we don't need the result, just ensure we evaluate any
6819 subexpressions. */
6820 if (ignore)
6821 {
6822 tree elt;
8a06f2d4 6823
f75fb6ae 6824 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
8a06f2d4 6825 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6826
f75fb6ae 6827 return const0_rtx;
6828 }
603c4ee1 6829
2ef1e405 6830 /* All elts simple constants => refer to a constant in memory. But
6831 if this is a non-BLKmode mode, let it store a field at a time
6832 since that should make a CONST_INT or CONST_DOUBLE when we
603c4ee1 6833 fold. Likewise, if we have a target we can use, it is best to
8cb5b99d 6834 store directly into the target unless the type is large enough
6835 that memcpy will be used. If we are making an initializer and
a43fa0dd 6836 all operands are constant, put it in memory as well.
6837
6838	 FIXME: Avoid trying to fill vector constructors piecemeal.
6839 Output them with output_constant_def below unless we're sure
6840 they're zeros. This should go away when vector initializers
6841 are treated like VECTOR_CST instead of arrays.
6842 */
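      /* Concrete cases (for illustration): a constructor such as
	 "static const int tbl[3] = {1, 2, 3};" is emitted once into the
	 constant pool by the output_constant_def branch below, whereas a
	 small automatic initializer falls through to store_constructor
	 and is stored into TARGET field by field.  */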
f75fb6ae 6843 else if ((TREE_STATIC (exp)
603c4ee1 6844 && ((mode == BLKmode
997d68fe 6845 && ! (target != 0 && safe_from_p (target, exp, 1)))
8cb5b99d 6846 || TREE_ADDRESSABLE (exp)
325d1c45 6847 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
fa56dc1d 6848 && (! MOVE_BY_PIECES_P
325d1c45 6849 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6850 TYPE_ALIGN (type)))
4ee9c684 6851 && ! mostly_zeros_p (exp))))
aa71376d 6852 || ((modifier == EXPAND_INITIALIZER
6853 || modifier == EXPAND_CONST_ADDRESS)
6854 && TREE_CONSTANT (exp)))
10f307d9 6855 {
abf74c5b 6856 rtx constructor = output_constant_def (exp, 1);
325d1c45 6857
bb5fd95e 6858 if (modifier != EXPAND_CONST_ADDRESS
6859 && modifier != EXPAND_INITIALIZER
537ffcfc 6860 && modifier != EXPAND_SUM)
6861 constructor = validize_mem (constructor);
6862
10f307d9 6863 return constructor;
6864 }
10f307d9 6865 else
6866 {
c359e3f7 6867 /* Handle calls that pass values in multiple non-contiguous
6868 locations. The Irix 6 ABI has examples of this. */
997d68fe 6869 if (target == 0 || ! safe_from_p (target, exp, 1)
a35a63ff 6870 || GET_CODE (target) == PARALLEL
6871 || modifier == EXPAND_STACK_PARM)
387bc205 6872 target
6873 = assign_temp (build_qualified_type (type,
6874 (TYPE_QUALS (type)
6875 | (TREE_READONLY (exp)
6876 * TYPE_QUAL_CONST))),
18279aee 6877 0, TREE_ADDRESSABLE (exp), 1);
6703a20a 6878
e792f237 6879 store_constructor (exp, target, 0, int_expr_size (exp));
10f307d9 6880 return target;
6881 }
6882
6883 case INDIRECT_REF:
6884 {
6885 tree exp1 = TREE_OPERAND (exp, 0);
fa56dc1d 6886
4ee9c684 6887 if (modifier != EXPAND_WRITE)
6888 {
6889 tree t;
6890
6891 t = fold_read_from_constant_string (exp);
6892 if (t)
6893 return expand_expr (t, target, tmode, modifier);
6894 }
10f307d9 6895
1128c34c 6896 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6897 op0 = memory_address (mode, op0);
941522d6 6898 temp = gen_rtx_MEM (mode, op0);
f7c44134 6899 set_mem_attributes (temp, exp, 0);
37749825 6900
155b05dc 6901 /* If we are writing to this object and its type is a record with
6902 readonly fields, we must mark it as readonly so it will
6903 conflict with readonly references to those fields. */
8a06f2d4 6904 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
155b05dc 6905 RTX_UNCHANGING_P (temp) = 1;
6906
2a8921a2 6907 return temp;
6908 }
10f307d9 6909
6910 case ARRAY_REF:
4ee9c684 6911
6912#ifdef ENABLE_CHECKING
cf389750 6913 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6914 abort ();
4ee9c684 6915#endif
10f307d9 6916
10f307d9 6917 {
cf389750 6918 tree array = TREE_OPERAND (exp, 0);
6374121b 6919 tree low_bound = array_ref_low_bound (exp);
902de8ed 6920 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
26e80911 6921 HOST_WIDE_INT i;
c869557a 6922
ecef77f6 6923	/* Optimize the special case of a zero lower bound.
6924
6925 We convert the low_bound to sizetype to avoid some problems
6926 with constant folding. (E.g. suppose the lower bound is 1,
6927 and its mode is QI. Without the conversion, (ARRAY
6928 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
902de8ed 6929 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
ecef77f6 6930
cf389750 6931 if (! integer_zerop (low_bound))
902de8ed 6932 index = size_diffop (index, convert (sizetype, low_bound));
cf389750 6933
cf389750 6934 /* Fold an expression like: "foo"[2].
8169404e 6935 This is not done in fold so it won't happen inside &.
6936 Don't fold if this is for wide characters since it's too
6937 difficult to do correctly and this is a very rare case. */
cf389750 6938
68b956ae 6939 if (modifier != EXPAND_CONST_ADDRESS
6940 && modifier != EXPAND_INITIALIZER
4ee9c684 6941 && modifier != EXPAND_MEMORY)
6942 {
6943 tree t = fold_read_from_constant_string (exp);
6944
6945 if (t)
6946 return expand_expr (t, target, tmode, modifier);
6947 }
10f307d9 6948
cf389750 6949 /* If this is a constant index into a constant array,
6950 just get the value from the array. Handle both the cases when
6951 we have an explicit constructor and when our operand is a variable
6952 that was declared const. */
2ef1e405 6953
68b956ae 6954 if (modifier != EXPAND_CONST_ADDRESS
6955 && modifier != EXPAND_INITIALIZER
6956 && modifier != EXPAND_MEMORY
6957 && TREE_CODE (array) == CONSTRUCTOR
6958 && ! TREE_SIDE_EFFECTS (array)
a0c2c45b 6959 && TREE_CODE (index) == INTEGER_CST
fa56dc1d 6960 && 0 > compare_tree_int (index,
a0c2c45b 6961 list_length (CONSTRUCTOR_ELTS
6962 (TREE_OPERAND (exp, 0)))))
cf389750 6963 {
a0c2c45b 6964 tree elem;
6965
6966 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6967 i = TREE_INT_CST_LOW (index);
6968 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6969 ;
6970
6971 if (elem)
8a06f2d4 6972 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6973 modifier);
cf389750 6974 }
fa56dc1d 6975
cf389750 6976 else if (optimize >= 1
b1ff8ab1 6977 && modifier != EXPAND_CONST_ADDRESS
6978 && modifier != EXPAND_INITIALIZER
68b956ae 6979 && modifier != EXPAND_MEMORY
cf389750 6980 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6981 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3a855a53 6982 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6983 && targetm.binds_local_p (array))
cf389750 6984 {
26e80911 6985 if (TREE_CODE (index) == INTEGER_CST)
cf389750 6986 {
6987 tree init = DECL_INITIAL (array);
6988
cf389750 6989 if (TREE_CODE (init) == CONSTRUCTOR)
6990 {
5d844ba2 6991 tree elem;
cf389750 6992
a0c2c45b 6993 for (elem = CONSTRUCTOR_ELTS (init);
7f01d61c 6994 (elem
6995 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
a0c2c45b 6996 elem = TREE_CHAIN (elem))
6997 ;
6998
a6d6d374 6999 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
cf389750 7000 return expand_expr (fold (TREE_VALUE (elem)), target,
8a06f2d4 7001 tmode, modifier);
cf389750 7002 }
7003 else if (TREE_CODE (init) == STRING_CST
a0c2c45b 7004 && 0 > compare_tree_int (index,
7005 TREE_STRING_LENGTH (init)))
bdb729f9 7006 {
7007 tree type = TREE_TYPE (TREE_TYPE (init));
7008 enum machine_mode mode = TYPE_MODE (type);
7009
7010 if (GET_MODE_CLASS (mode) == MODE_INT
7011 && GET_MODE_SIZE (mode) == 1)
506664d9 7012 return gen_int_mode (TREE_STRING_POINTER (init)
7013 [TREE_INT_CST_LOW (index)], mode);
bdb729f9 7014 }
cf389750 7015 }
7016 }
7017 }
63c8163c 7018 goto normal_inner_ref;
10f307d9 7019
7020 case COMPONENT_REF:
2ef1e405 7021 /* If the operand is a CONSTRUCTOR, we can just extract the
63c8163c 7022 appropriate field if it is present. */
7023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
2ef1e405 7024 {
7025 tree elt;
7026
7027 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7028 elt = TREE_CHAIN (elt))
c30615f6 7029 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7030 /* We can normally use the value of the field in the
7031 CONSTRUCTOR. However, if this is a bitfield in
7032 an integral mode that we can fit in a HOST_WIDE_INT,
7033 we must mask only the number of bits in the bitfield,
7034 since this is done implicitly by the constructor. If
7035 the bitfield does not meet either of those conditions,
7036 we can't do this optimization. */
7037 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7038 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7039 == MODE_INT)
7040 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7041 <= HOST_BITS_PER_WIDE_INT))))
7042 {
a35a63ff 7043 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7044 && modifier == EXPAND_STACK_PARM)
7045 target = 0;
fa56dc1d 7046 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
c30615f6 7047 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7048 {
ab7943b9 7049 HOST_WIDE_INT bitsize
7050 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6de9716c 7051 enum machine_mode imode
7052 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
c30615f6 7053
78a8ed03 7054 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
c30615f6 7055 {
7056 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6de9716c 7057 op0 = expand_and (imode, op0, op1, target);
c30615f6 7058 }
7059 else
7060 {
7061 tree count
997d68fe 7062 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7063 0);
c30615f6 7064
7065 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7066 target, 0);
7067 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7068 target, 0);
7069 }
7070 }
7071
7072 return op0;
7073 }
2ef1e405 7074 }
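      /* Illustration of the masking above (sizes hypothetical): for a
	 signed 3-bit field whose type has SImode, the two expand_shift
	 calls compute

	     op0 = (op0 << (32 - 3)) >> (32 - 3);

	 with an arithmetic right shift, while the unsigned case simply
	 ANDs op0 with (1 << 3) - 1.  */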
63c8163c 7075 goto normal_inner_ref;
2ef1e405 7076
63c8163c 7077 case BIT_FIELD_REF:
7078 case ARRAY_RANGE_REF:
7079 normal_inner_ref:
10f307d9 7080 {
7081 enum machine_mode mode1;
02e7a332 7082 HOST_WIDE_INT bitsize, bitpos;
954bdcb1 7083 tree offset;
10f307d9 7084 int volatilep = 0;
7fce34be 7085 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2b96c5f6 7086 &mode1, &unsignedp, &volatilep);
c3a9c149 7087 rtx orig_op0;
10f307d9 7088
227bf826 7089 /* If we got back the original object, something is wrong. Perhaps
7090 we are evaluating an expression too early. In any event, don't
7091 infinitely recurse. */
7092 if (tem == exp)
7093 abort ();
7094
5dfe36ec 7095 /* If TEM's type is a union of variable size, pass TARGET to the inner
00039714 7096 computation, since it will need a temporary and TARGET is known
7097	   to suffice.  This occurs in unchecked conversion in Ada.  */
fa56dc1d 7098
c3a9c149 7099 orig_op0 = op0
7100 = expand_expr (tem,
7101 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7102 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7103 != INTEGER_CST)
a35a63ff 7104 && modifier != EXPAND_STACK_PARM
c3a9c149 7105 ? target : NULL_RTX),
7106 VOIDmode,
7107 (modifier == EXPAND_INITIALIZER
a35a63ff 7108 || modifier == EXPAND_CONST_ADDRESS
7109 || modifier == EXPAND_STACK_PARM)
c3a9c149 7110 ? modifier : EXPAND_NORMAL);
10f307d9 7111
2a8921a2 7112 /* If this is a constant, put it into a register if it is a
155b05dc 7113	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
2a8921a2 7114 if (CONSTANT_P (op0))
7115 {
7116 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
155b05dc 7117 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7118 && offset == 0)
2a8921a2 7119 op0 = force_reg (mode, op0);
7120 else
7121 op0 = validize_mem (force_const_mem (mode, op0));
7122 }
7123
a7bab26c 7124	/* Otherwise, if this object is not in memory and we either have an
7125 offset or a BLKmode result, put it there. This case can't occur in
7126 C, but can in Ada if we have unchecked conversion of an expression
7127 from a scalar type to an array or record type or for an
7128 ARRAY_RANGE_REF whose type is BLKmode. */
e16ceb8e 7129 else if (!MEM_P (op0)
a7bab26c 7130 && (offset != 0
7131 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7132 {
67c155cb 7133 tree nt = build_qualified_type (TREE_TYPE (tem),
7134 (TYPE_QUALS (TREE_TYPE (tem))
7135 | TYPE_QUAL_CONST));
7136 rtx memloc = assign_temp (nt, 1, 1, 1);
1d5ca076 7137
67c155cb 7138 emit_move_insn (memloc, op0);
7139 op0 = memloc;
a7bab26c 7140 }
7141
954bdcb1 7142 if (offset != 0)
7143 {
a35a63ff 7144 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7145 EXPAND_SUM);
954bdcb1 7146
e16ceb8e 7147 if (!MEM_P (op0))
954bdcb1 7148 abort ();
5785f96f 7149
5785f96f 7150#ifdef POINTERS_EXTEND_UNSIGNED
479e4d5e 7151 if (GET_MODE (offset_rtx) != Pmode)
33402d67 7152 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4a836698 7153#else
7154 if (GET_MODE (offset_rtx) != ptr_mode)
7155 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5785f96f 7156#endif
7157
fceda2f5 7158 if (GET_MODE (op0) == BLKmode
7159	 /* A constant address in OP0 can have VOIDmode; we must
7160 not try to call force_reg in that case. */
f2eca2c2 7161 && GET_MODE (XEXP (op0, 0)) != VOIDmode
155b05dc 7162 && bitsize != 0
fa56dc1d 7163 && (bitpos % bitsize) == 0
25d55d72 7164 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
2b96c5f6 7165 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
25d55d72 7166 {
fac6aae6 7167 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
25d55d72 7168 bitpos = 0;
7169 }
7170
fcdc122e 7171 op0 = offset_address (op0, offset_rtx,
7172 highest_pow2_factor (offset));
954bdcb1 7173 }
7174
67c68e45 7175 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7176 record its alignment as BIGGEST_ALIGNMENT. */
e16ceb8e 7177 if (MEM_P (op0) && bitpos == 0 && offset != 0
67c68e45 7178 && is_aligning_offset (offset, tem))
7179 set_mem_align (op0, BIGGEST_ALIGNMENT);
7180
10f307d9 7181 /* Don't forget about volatility even if this is a bitfield. */
e16ceb8e 7182 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10f307d9 7183 {
c3a9c149 7184 if (op0 == orig_op0)
7185 op0 = copy_rtx (op0);
7186
10f307d9 7187 MEM_VOLATILE_P (op0) = 1;
7188 }
7189
963043a6 7190 /* The following code doesn't handle CONCAT.
7191 Assume only bitpos == 0 can be used for CONCAT, due to
7192	 one-element arrays having the same mode as their element.  */
7193 if (GET_CODE (op0) == CONCAT)
7194 {
7195 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7196 abort ();
7197 return op0;
7198 }
7199
4e05e574 7200 /* In cases where an aligned union has an unaligned object
7201 as a field, we might be extracting a BLKmode value from
7202 an integer-mode (e.g., SImode) object. Handle this case
7203 by doing the extract into an object as wide as the field
7204 (which we know to be the width of a basic mode), then
b1ff8ab1 7205 storing into memory, and changing the mode to BLKmode. */
10f307d9 7206 if (mode1 == VOIDmode
8ad4c111 7207 || REG_P (op0) || GET_CODE (op0) == SUBREG
b1ff8ab1 7208 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7209 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
fb2d4326 7210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7211 && modifier != EXPAND_CONST_ADDRESS
7212 && modifier != EXPAND_INITIALIZER)
b1ff8ab1 7213 /* If the field isn't aligned enough to fetch as a memref,
7214 fetch it as a bit field. */
7215 || (mode1 != BLKmode
8f6f6bc8 7216 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
fceda2f5 7217 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
e16ceb8e 7218 || (MEM_P (op0)
fceda2f5 7219 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7220 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
25ff63d0 7221 && ((modifier == EXPAND_CONST_ADDRESS
7222 || modifier == EXPAND_INITIALIZER)
7223 ? STRICT_ALIGNMENT
7224 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
8f6f6bc8 7225 || (bitpos % BITS_PER_UNIT != 0)))
b1ff8ab1 7226 /* If the type and the field are a constant size and the
7227 size of the type isn't the same size as the bitfield,
7228 we must use bitfield operations. */
7229 || (bitsize >= 0
7230 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7231 == INTEGER_CST)
7232 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
2b96c5f6 7233 bitsize)))
10f307d9 7234 {
10f307d9 7235 enum machine_mode ext_mode = mode;
7236
155b05dc 7237 if (ext_mode == BLKmode
e16ceb8e 7238 && ! (target != 0 && MEM_P (op0)
7239 && MEM_P (target)
155b05dc 7240 && bitpos % BITS_PER_UNIT == 0))
10f307d9 7241 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7242
7243 if (ext_mode == BLKmode)
0e20f9fb 7244 {
0e9fefce 7245 if (target == 0)
7246 target = assign_temp (type, 0, 1, 1);
7247
7248 if (bitsize == 0)
7249 return target;
7250
0e20f9fb 7251 /* In this case, BITPOS must start at a byte boundary and
7252 TARGET, if specified, must be a MEM. */
e16ceb8e 7253 if (!MEM_P (op0)
7254 || (target != 0 && !MEM_P (target))
0e20f9fb 7255 || bitpos % BITS_PER_UNIT != 0)
7256 abort ();
7257
0e9fefce 7258 emit_block_move (target,
7259 adjust_address (op0, VOIDmode,
7260 bitpos / BITS_PER_UNIT),
2b96c5f6 7261 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
0378dbdc 7262 / BITS_PER_UNIT),
a35a63ff 7263 (modifier == EXPAND_STACK_PARM
7264 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
fa56dc1d 7265
0e20f9fb 7266 return target;
7267 }
10f307d9 7268
6e6b4174 7269 op0 = validize_mem (op0);
7270
e16ceb8e 7271 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
2c269e73 7272 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6e6b4174 7273
a35a63ff 7274 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7275 (modifier == EXPAND_STACK_PARM
7276 ? NULL_RTX : target),
7277 ext_mode, ext_mode,
10f307d9 7278 int_size_in_bytes (TREE_TYPE (tem)));
0aa5cbcc 7279
7280 /* If the result is a record type and BITSIZE is narrower than
7281 the mode of OP0, an integral mode, and this is a big endian
7282 machine, we must put the field into the high-order bits. */
7283 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7284 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
cce8da2f 7285 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
0aa5cbcc 7286 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7287 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7288 - bitsize),
7289 op0, 1);
7290
55a176cc 7291 /* If the result type is BLKmode, store the data into a temporary
7292 of the appropriate type, but with the mode corresponding to the
7293 mode for the data we have (op0's mode). It's tempting to make
7294 this a constant type, since we know it's only being stored once,
7295 but that can cause problems if we are taking the address of this
7296 COMPONENT_REF because the MEM of any reference via that address
7297 will have flags corresponding to the type, which will not
7298 necessarily be constant. */
10f307d9 7299 if (mode == BLKmode)
7300 {
55a176cc 7301 rtx new
7302 = assign_stack_temp_for_type
7303 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
10f307d9 7304
7305 emit_move_insn (new, op0);
7306 op0 = copy_rtx (new);
7307 PUT_MODE (op0, BLKmode);
a9d9ab08 7308 set_mem_attributes (op0, exp, 1);
10f307d9 7309 }
7310
7311 return op0;
7312 }
7313
f8ca8b77 7314 /* If the result is BLKmode, use that to access the object
7315 now as well. */
7316 if (mode == BLKmode)
7317 mode1 = BLKmode;
7318
10f307d9 7319 /* Get a reference to just this component. */
7320 if (modifier == EXPAND_CONST_ADDRESS
7321 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
e4e86ec5 7322 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10f307d9 7323 else
e513d163 7324 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
b5ba9f3a 7325
c3a9c149 7326 if (op0 == orig_op0)
7327 op0 = copy_rtx (op0);
7328
f7c44134 7329 set_mem_attributes (op0, exp, 0);
8ad4c111 7330 if (REG_P (XEXP (op0, 0)))
2b96c5f6 7331 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6e6b4174 7332
10f307d9 7333 MEM_VOLATILE_P (op0) |= volatilep;
1c9f9aa6 7334 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
0909656b 7335 || modifier == EXPAND_CONST_ADDRESS
1c9f9aa6 7336 || modifier == EXPAND_INITIALIZER)
10f307d9 7337 return op0;
1c9f9aa6 7338 else if (target == 0)
10f307d9 7339 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
1c9f9aa6 7340
10f307d9 7341 convert_move (target, op0, unsignedp);
7342 return target;
7343 }
7344
215e2f1d 7345 case OBJ_TYPE_REF:
7346 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
cef0c6a0 7347
10f307d9 7348 /* Intended for a reference to a buffer of a file-object in Pascal.
7349 But it's not certain that a special tree code will really be
7350 necessary for these. INDIRECT_REF might work for them. */
7351 case BUFFER_REF:
7352 abort ();
7353
b63679d2 7354 case IN_EXPR:
b63679d2 7355 {
d2ae1b1e 7356 /* Pascal set IN expression.
7357
7358 Algorithm:
7359 rlo = set_low - (set_low%bits_per_word);
7360 the_word = set [ (index - rlo)/bits_per_word ];
7361 bit_index = index % bits_per_word;
7362 bitmask = 1 << bit_index;
7363 return !!(the_word & bitmask); */
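	/* A compilable C sketch of that algorithm (illustrative; it uses
	   8-bit units, matching the BITS_PER_UNIT arithmetic emitted
	   below, where the comment above says bits_per_word):

	       static int
	       in_set (const unsigned char *set, int set_low, int index)
	       {
		 int rlo = set_low - set_low % 8;
		 unsigned char the_word = set[(index - rlo) / 8];
		 return (the_word >> (index % 8)) & 1;
	       }
	*/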
7364
b63679d2 7365 tree set = TREE_OPERAND (exp, 0);
7366 tree index = TREE_OPERAND (exp, 1);
78a8ed03 7367 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
b63679d2 7368 tree set_type = TREE_TYPE (set);
b63679d2 7369 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7370 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d2ae1b1e 7371 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7372 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7373 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7374 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7375 rtx setaddr = XEXP (setval, 0);
7376 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
b63679d2 7377 rtx rlow;
7378 rtx diff, quo, rem, addr, bit, result;
b63679d2 7379
d2ae1b1e 7380 /* If domain is empty, answer is no. Likewise if index is constant
7381 and out of bounds. */
7d27e4c9 7382 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d2ae1b1e 7383 && TREE_CODE (set_low_bound) == INTEGER_CST
7d27e4c9 7384 && tree_int_cst_lt (set_high_bound, set_low_bound))
d2ae1b1e 7385 || (TREE_CODE (index) == INTEGER_CST
7386 && TREE_CODE (set_low_bound) == INTEGER_CST
7387 && tree_int_cst_lt (index, set_low_bound))
7388 || (TREE_CODE (set_high_bound) == INTEGER_CST
7389 && TREE_CODE (index) == INTEGER_CST
7390 && tree_int_cst_lt (set_high_bound, index))))
b63679d2 7391 return const0_rtx;
7392
d2ae1b1e 7393 if (target == 0)
7394 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
b63679d2 7395
7396 /* If we get here, we have to generate the code for both cases
7397 (in range and out of range). */
7398
7399 op0 = gen_label_rtx ();
7400 op1 = gen_label_rtx ();
7401
7402 if (! (GET_CODE (index_val) == CONST_INT
7403 && GET_CODE (lo_r) == CONST_INT))
2b96c5f6 7404 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7405 GET_MODE (index_val), iunsignedp, op1);
b63679d2 7406
7407 if (! (GET_CODE (index_val) == CONST_INT
7408 && GET_CODE (hi_r) == CONST_INT))
2b96c5f6 7409 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7410 GET_MODE (index_val), iunsignedp, op1);
b63679d2 7411
7412 /* Calculate the element number of bit zero in the first word
7413 of the set. */
7414 if (GET_CODE (lo_r) == CONST_INT)
17cc6bd4 7415 rlow = GEN_INT (INTVAL (lo_r)
fa56dc1d 7416 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
b63679d2 7417 else
17cc6bd4 7418 rlow = expand_binop (index_mode, and_optab, lo_r,
7419 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d2ae1b1e 7420 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
b63679d2 7421
d2ae1b1e 7422 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7423 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
b63679d2 7424
7425 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d2ae1b1e 7426 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
b63679d2 7427 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d2ae1b1e 7428 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7429
b63679d2 7430 addr = memory_address (byte_mode,
d2ae1b1e 7431 expand_binop (index_mode, add_optab, diff,
7432 setaddr, NULL_RTX, iunsignedp,
17cc6bd4 7433 OPTAB_LIB_WIDEN));
d2ae1b1e 7434
fa56dc1d 7435 /* Extract the bit we want to examine. */
b63679d2 7436 bit = expand_shift (RSHIFT_EXPR, byte_mode,
941522d6 7437 gen_rtx_MEM (byte_mode, addr),
17cc6bd4 7438 make_tree (TREE_TYPE (index), rem),
7439 NULL_RTX, 1);
7440 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7441 GET_MODE (target) == byte_mode ? target : 0,
b63679d2 7442 1, OPTAB_LIB_WIDEN);
17cc6bd4 7443
7444 if (result != target)
7445 convert_move (target, result, 1);
b63679d2 7446
7447 /* Output the code to handle the out-of-range case. */
7448 emit_jump (op0);
7449 emit_label (op1);
7450 emit_move_insn (target, const0_rtx);
7451 emit_label (op0);
7452 return target;
7453 }
7454
10f307d9 7455 case WITH_CLEANUP_EXPR:
5929001a 7456 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
10f307d9 7457 {
5929001a 7458 WITH_CLEANUP_EXPR_RTL (exp)
8a06f2d4 7459 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
a9bc793b 7460 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7461 CLEANUP_EH_ONLY (exp));
694ec519 7462
10f307d9 7463 /* That's it for this cleanup. */
5929001a 7464 TREE_OPERAND (exp, 1) = 0;
10f307d9 7465 }
5929001a 7466 return WITH_CLEANUP_EXPR_RTL (exp);
10f307d9 7467
34e2ddcd 7468 case CLEANUP_POINT_EXPR:
7469 {
694ec519 7470 /* Start a new binding layer that will keep track of all cleanup
7471 actions to be performed. */
87a9ad11 7472 expand_start_bindings (2);
694ec519 7473
6fd6341f 7474 target_temp_slot_level = temp_slot_level;
694ec519 7475
8a06f2d4 7476 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
80036564 7477 /* If we're going to use this value, load it up now. */
7478 if (! ignore)
7479 op0 = force_not_mem (op0);
6fd6341f 7480 preserve_temp_slots (op0);
694ec519 7481 expand_end_bindings (NULL_TREE, 0, 0);
34e2ddcd 7482 }
7483 return op0;
7484
10f307d9 7485 case CALL_EXPR:
7486 /* Check for a built-in function. */
7487 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d2ae1b1e 7488 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7489 == FUNCTION_DECL)
10f307d9 7490 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
ff385626 7491 {
edbbe5ca 7492 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7493 == BUILT_IN_FRONTEND)
5135beeb 7494 return lang_hooks.expand_expr (exp, original_target,
7495 tmode, modifier,
7496 alt_rtl);
edbbe5ca 7497 else
7498 return expand_builtin (exp, target, subtarget, tmode, ignore);
7499 }
d2ae1b1e 7500
4e0ff571 7501 return expand_call (exp, target, ignore);
10f307d9 7502
7503 case NON_LVALUE_EXPR:
7504 case NOP_EXPR:
7505 case CONVERT_EXPR:
7506 case REFERENCE_EXPR:
87ec3f77 7507 if (TREE_OPERAND (exp, 0) == error_mark_node)
f30669db 7508 return const0_rtx;
87ec3f77 7509
10f307d9 7510 if (TREE_CODE (type) == UNION_TYPE)
7511 {
7512 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
155b05dc 7513
a9d9ab08 7514 /* If both input and output are BLKmode, this conversion isn't doing
7515	 anything except possibly changing the memory attributes.  */
7516 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7517 {
7518 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7519 modifier);
7520
7521 result = copy_rtx (result);
7522 set_mem_attributes (result, exp, 0);
7523 return result;
7524 }
155b05dc 7525
10f307d9 7526 if (target == 0)
be6993e7 7527 {
7528 if (TYPE_MODE (type) != BLKmode)
7529 target = gen_reg_rtx (TYPE_MODE (type));
7530 else
7531 target = assign_temp (type, 0, 1, 1);
7532 }
d2ae1b1e 7533
e16ceb8e 7534 if (MEM_P (target))
10f307d9 7535 /* Store data into beginning of memory target. */
7536 store_expr (TREE_OPERAND (exp, 0),
a35a63ff 7537 adjust_address (target, TYPE_MODE (valtype), 0),
7538 modifier == EXPAND_STACK_PARM ? 2 : 0);
acfb31e5 7539
8ad4c111 7540 else if (REG_P (target))
10f307d9 7541 /* Store this field into a union of the proper type. */
155b05dc 7542 store_field (target,
7543 MIN ((int_size_in_bytes (TREE_TYPE
7544 (TREE_OPERAND (exp, 0)))
7545 * BITS_PER_UNIT),
e1439bcb 7546 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
155b05dc 7547 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
2b96c5f6 7548 VOIDmode, 0, type, 0);
10f307d9 7549 else
7550 abort ();
7551
7552 /* Return the entire union. */
7553 return target;
7554 }
d2ae1b1e 7555
d324678b 7556 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7557 {
7558 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8a06f2d4 7559 modifier);
d324678b 7560
7561 /* If the signedness of the conversion differs and OP0 is
7562 a promoted SUBREG, clear that indication since we now
7563 have to do the proper extension. */
78a8ed03 7564 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
d324678b 7565 && GET_CODE (op0) == SUBREG)
7566 SUBREG_PROMOTED_VAR_P (op0) = 0;
7567
7568 return op0;
7569 }
7570
5b1bb114 7571 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
dda75192 7572 if (GET_MODE (op0) == mode)
7573 return op0;
dda75192 7574
d2ae1b1e 7575 /* If OP0 is a constant, just convert it into the proper mode. */
7576 if (CONSTANT_P (op0))
5b1bb114 7577 {
7578 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7579 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7580
ff385626 7581 if (modifier == EXPAND_INITIALIZER)
5b1bb114 7582 return simplify_gen_subreg (mode, op0, inner_mode,
7583 subreg_lowpart_offset (mode,
7584 inner_mode));
7585 else
7586 return convert_modes (mode, inner_mode, op0,
78a8ed03 7587 TYPE_UNSIGNED (inner_type));
5b1bb114 7588 }
dda75192 7589
1aaabd2e 7590 if (modifier == EXPAND_INITIALIZER)
941522d6 7591 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d2ae1b1e 7592
10f307d9 7593 if (target == 0)
d2ae1b1e 7594 return
7595 convert_to_mode (mode, op0,
78a8ed03 7596 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
10f307d9 7597 else
d2ae1b1e 7598 convert_move (target, op0,
78a8ed03 7599 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
10f307d9 7600 return target;
7601
f96c43fb 7602 case VIEW_CONVERT_EXPR:
8a06f2d4 7603 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
f96c43fb 7604
7605 /* If the input and output modes are both the same, we are done.
794d6bca 7606 Otherwise, if neither mode is BLKmode and both are integral and within
7607 a word, we can use gen_lowpart. If neither is true, make sure the
7608 operand is in memory and convert the MEM to the new mode. */
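      /* E.g. (illustrative): viewing a four-byte record whose mode is
	 SImode as an unsigned int stays in a register via gen_lowpart;
	 viewing a float as an int instead goes through a stack
	 temporary, since SFmode is not MODE_INT.  */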
f96c43fb 7609 if (TYPE_MODE (type) == GET_MODE (op0))
7610 ;
7611 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
794d6bca 7612 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7613 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
f96c43fb 7614 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7615 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7616 op0 = gen_lowpart (TYPE_MODE (type), op0);
e16ceb8e 7617 else if (!MEM_P (op0))
f96c43fb 7618 {
e58d0f17 7619 /* If the operand is not a MEM, force it into memory. Since we
7620	 are going to be changing the mode of the MEM, don't call
7621 force_const_mem for constants because we don't allow pool
7622 constants to change mode. */
f96c43fb 7623 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
f96c43fb 7624
e58d0f17 7625 if (TREE_ADDRESSABLE (exp))
7626 abort ();
f96c43fb 7627
e58d0f17 7628 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7629 target
7630 = assign_stack_temp_for_type
7631 (TYPE_MODE (inner_type),
7632 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
f96c43fb 7633
e58d0f17 7634 emit_move_insn (target, op0);
7635 op0 = target;
f96c43fb 7636 }
7637
e58d0f17 7638 /* At this point, OP0 is in the correct mode. If the output type is such
7639 that the operand is known to be aligned, indicate that it is.
7640 Otherwise, we need only be concerned about alignment for non-BLKmode
7641 results. */
e16ceb8e 7642 if (MEM_P (op0))
f96c43fb 7643 {
7644 op0 = copy_rtx (op0);
7645
f96c43fb 7646 if (TYPE_ALIGN_OK (type))
7647 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7648 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7649 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7650 {
7651 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
cce8da2f 7652 HOST_WIDE_INT temp_size
7653 = MAX (int_size_in_bytes (inner_type),
7654 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
f96c43fb 7655 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7656 temp_size, 0, type);
7a827396 7657 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
f96c43fb 7658
e58d0f17 7659 if (TREE_ADDRESSABLE (exp))
7660 abort ();
7661
f96c43fb 7662 if (GET_MODE (op0) == BLKmode)
7663 emit_block_move (new_with_op0_mode, op0,
0378dbdc 7664 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
a35a63ff 7665 (modifier == EXPAND_STACK_PARM
7666 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
f96c43fb 7667 else
7668 emit_move_insn (new_with_op0_mode, op0);
7669
7670 op0 = new;
7671 }
ff385626 7672
7a827396 7673 op0 = adjust_address (op0, TYPE_MODE (type), 0);
f96c43fb 7674 }
7675
7676 return op0;
7677
10f307d9 7678 case PLUS_EXPR:
bec2d490 7679 this_optab = ! unsignedp && flag_trapv
a2c7420e 7680 && (GET_MODE_CLASS (mode) == MODE_INT)
bec2d490 7681 ? addv_optab : add_optab;
10f307d9 7682
735f4358 7683 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
10f307d9 7684 something else, make sure we add the register to the constant and
7685 then to the other thing. This case can occur during strength
7686 reduction and doing it this way will produce better code if the
7687 frame pointer or argument pointer is eliminated.
7688
7689 fold-const.c will ensure that the constant is always in the inner
7690 PLUS_EXPR, so the only case we need to do anything about is if
7691 sp, ap, or fp is our second argument, in which case we must swap
7692 the innermost first argument and our second argument. */
7693
7694 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7695 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
735f4358 7696 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7697 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7698 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7699 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
10f307d9 7700 {
7701 tree t = TREE_OPERAND (exp, 1);
7702
7703 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7704 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7705 }
7706
ed8d3eee 7707 /* If the result is to be ptr_mode and we are adding an integer to
10f307d9 7708 something, we might be forming a constant. So try to use
7709 plus_constant. If it produces a sum and we can't accept it,
7710 use force_operand. This allows P = &ARR[const] to generate
7711 efficient code on machines where a SYMBOL_REF is not a valid
7712 address.
7713
7714 If this is an EXPAND_SUM call, always return the sum. */
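      /* For example (mode hypothetical): for P = &ARR[10] with
	 four-byte elements, plus_constant folds the address to

	     (const:SI (plus:SI (symbol_ref:SI "ARR") (const_int 40)))

	 so no add insn is needed.  */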
66aa258b 7715 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
ff385626 7716 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
10f307d9 7717 {
a35a63ff 7718 if (modifier == EXPAND_STACK_PARM)
7719 target = 0;
66aa258b 7720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7721 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7722 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7723 {
2c551bbe 7724 rtx constant_part;
7725
66aa258b 7726 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7727 EXPAND_SUM);
2c551bbe 7728 /* Use immed_double_const to ensure that the constant is
7729 truncated according to the mode of OP1, then sign extended
7730 to a HOST_WIDE_INT. Using the constant directly can result
7731 in non-canonical RTL in a 64x32 cross compile. */
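	      /* Worked example: on a 64-bit host targeting a 32-bit
		 machine, the low word 0xffffffff must become
		 (const_int -1) in SImode; using the host value directly
		 would produce the non-canonical (const_int 0xffffffff).  */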
7732 constant_part
7733 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7734 (HOST_WIDE_INT) 0,
0bf16c4a 7735 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
79d5c3ba 7736 op1 = plus_constant (op1, INTVAL (constant_part));
66aa258b 7737 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7738 op1 = force_operand (op1, target);
7739 return op1;
7740 }
10f307d9 7741
66aa258b 7742 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7743 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7744 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7745 {
2c551bbe 7746 rtx constant_part;
7747
66aa258b 7748 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
00e465b1 7749 (modifier == EXPAND_INITIALIZER
7750 ? EXPAND_INITIALIZER : EXPAND_SUM));
66aa258b 7751 if (! CONSTANT_P (op0))
7752 {
7753 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7754 VOIDmode, modifier);
45fb3e2c 7755 /* Return a PLUS if modifier says it's OK. */
7756 if (modifier == EXPAND_SUM
7757 || modifier == EXPAND_INITIALIZER)
7758 return simplify_gen_binary (PLUS, mode, op0, op1);
7759 goto binop2;
66aa258b 7760 }
2c551bbe 7761 /* Use immed_double_const to ensure that the constant is
7762 truncated according to the mode of OP1, then sign extended
7763 to a HOST_WIDE_INT. Using the constant directly can result
7764 in non-canonical RTL in a 64x32 cross compile. */
7765 constant_part
7766 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7767 (HOST_WIDE_INT) 0,
f2761a0e 7768 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
79d5c3ba 7769 op0 = plus_constant (op0, INTVAL (constant_part));
66aa258b 7770 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7771 op0 = force_operand (op0, target);
7772 return op0;
7773 }
10f307d9 7774 }
7775
7776 /* No sense saving up arithmetic to be done
7777 if it's all in the wrong mode to form part of an address.
7778 And force_operand won't know whether to sign-extend or
7779 zero-extend. */
7780 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
ed8d3eee 7781 || mode != ptr_mode)
a60e4107 7782 {
33204670 7783 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7784 subtarget, &op0, &op1, 0);
8c1d1299 7785 if (op0 == const0_rtx)
7786 return op1;
7787 if (op1 == const0_rtx)
7788 return op0;
a60e4107 7789 goto binop2;
7790 }
10f307d9 7791
33204670 7792 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7793 subtarget, &op0, &op1, modifier);
45fb3e2c 7794 return simplify_gen_binary (PLUS, mode, op0, op1);
10f307d9 7795
7796 case MINUS_EXPR:
94d01330 7797 /* For initializers, we are allowed to return a MINUS of two
7798 symbolic constants. Here we handle all cases when both operands
7799 are constant. */
10f307d9 7800 /* Handle difference of two symbolic constants,
7801 for the sake of an initializer. */
7802 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7803 && really_constant_p (TREE_OPERAND (exp, 0))
7804 && really_constant_p (TREE_OPERAND (exp, 1)))
7805 {
33204670 7806 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7807 NULL_RTX, &op0, &op1, modifier);
94d01330 7808
94d01330 7809 /* If the last operand is a CONST_INT, use plus_constant of
7810 the negated constant. Else make the MINUS. */
7811 if (GET_CODE (op1) == CONST_INT)
7812 return plus_constant (op0, - INTVAL (op1));
7813 else
941522d6 7814 return gen_rtx_MINUS (mode, op0, op1);
10f307d9 7815 }
a02b3586 7816
bec2d490 7817 this_optab = ! unsignedp && flag_trapv
7818 && (GET_MODE_CLASS(mode) == MODE_INT)
7819 ? subv_optab : sub_optab;
acd367d1 7820
7821 /* No sense saving up arithmetic to be done
7822 if it's all in the wrong mode to form part of an address.
7823 And force_operand won't know whether to sign-extend or
7824 zero-extend. */
7825 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7826 || mode != ptr_mode)
7827 goto binop;
7828
33204670 7829 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7830 subtarget, &op0, &op1, modifier);
acd367d1 7831
7832 /* Convert A - const to A + (-const). */
7833 if (GET_CODE (op1) == CONST_INT)
7834 {
7835 op1 = negate_rtx (mode, op1);
45fb3e2c 7836 return simplify_gen_binary (PLUS, mode, op0, op1);
acd367d1 7837 }
7838
7839 goto binop2;
10f307d9 7840
7841 case MULT_EXPR:
10f307d9 7842 /* If first operand is constant, swap them.
7843 Thus the following special case checks need only
7844 check the second operand. */
7845 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7846 {
19cb6b50 7847 tree t1 = TREE_OPERAND (exp, 0);
10f307d9 7848 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7849 TREE_OPERAND (exp, 1) = t1;
7850 }
7851
7852 /* Attempt to return something suitable for generating an
7853 indexed address, for machines that support that. */
7854
ed8d3eee 7855 if (modifier == EXPAND_SUM && mode == ptr_mode
78e5555a 7856 && host_integerp (TREE_OPERAND (exp, 1), 0))
10f307d9 7857 {
a0c9f8fa 7858 tree exp1 = TREE_OPERAND (exp, 1);
7859
eb4b06b6 7860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7861 EXPAND_SUM);
10f307d9 7862
8ad4c111 7863 if (!REG_P (op0))
b572011e 7864 op0 = force_operand (op0, NULL_RTX);
8ad4c111 7865 if (!REG_P (op0))
10f307d9 7866 op0 = copy_to_mode_reg (mode, op0);
7867
a0c9f8fa 7868 return gen_rtx_MULT (mode, op0,
7869 gen_int_mode (tree_low_cst (exp1, 0),
7870 TYPE_MODE (TREE_TYPE (exp1))));
10f307d9 7871 }
7872
a35a63ff 7873 if (modifier == EXPAND_STACK_PARM)
7874 target = 0;
7875
10f307d9 7876 /* Check for multiplying things that have been extended
7877 from a narrower type. If this machine supports multiplying
7878 in that narrower type with a result in the desired type,
7879 do it that way, and avoid the explicit type-conversion. */
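      /* For instance, "(int) (short) a * (int) (short) b" can be done with
	 a single 16x16->32 widening multiply (such as a mulhisi3 pattern)
	 instead of two extensions followed by a full SImode multiply.  */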
7880 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7881 && TREE_CODE (type) == INTEGER_TYPE
7882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7883 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7884 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7885 && int_fits_type_p (TREE_OPERAND (exp, 1),
7886 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7887 /* Don't use a widening multiply if a shift will do. */
7888 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
b572011e 7889 > HOST_BITS_PER_WIDE_INT)
10f307d9 7890 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7891 ||
7892 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
78a8ed03 7893 && (TYPE_PRECISION (TREE_TYPE
7894 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7895 == TYPE_PRECISION (TREE_TYPE
7896 (TREE_OPERAND
7897 (TREE_OPERAND (exp, 0), 0))))
10f307d9 7898 /* If both operands are extended, they must either both
7899 be zero-extended or both be sign-extended. */
78a8ed03 7900 && (TYPE_UNSIGNED (TREE_TYPE
7901 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7902 == TYPE_UNSIGNED (TREE_TYPE
7903 (TREE_OPERAND
7904 (TREE_OPERAND (exp, 0), 0)))))))
10f307d9 7905 {
99e03bd2 7906 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7907 enum machine_mode innermode = TYPE_MODE (op0type);
78a8ed03 7908 bool zextend_p = TYPE_UNSIGNED (op0type);
99e03bd2 7909 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7910 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7911
10b58489 7912 if (mode == GET_MODE_WIDER_MODE (innermode))
10f307d9 7913 {
10b58489 7914 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7915 {
10b58489 7916 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
33204670 7917 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 TREE_OPERAND (exp, 1),
7919 NULL_RTX, &op0, &op1, 0);
10b58489 7920 else
33204670 7921 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7922 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7923 NULL_RTX, &op0, &op1, 0);
10b58489 7924 goto binop2;
7925 }
7926 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7927 && innermode == word_mode)
7928 {
99e03bd2 7929 rtx htem, hipart;
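		  /* Only the widening multiply of the other signedness is
		     available, and the operands fit in a word: compute the
		     full product with it, then patch up the high half with
		     expand_mult_highpart_adjust to account for the
		     signedness mismatch.  */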
10b58489 7930 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7931 NULL_RTX, VOIDmode, 0);
7932 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
15324f8c 7933 op1 = convert_modes (innermode, mode,
7934 expand_expr (TREE_OPERAND (exp, 1),
7935 NULL_RTX, VOIDmode, 0),
7936 unsignedp);
10b58489 7937 else
7938 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7939 NULL_RTX, VOIDmode, 0);
7940 temp = expand_binop (mode, other_optab, op0, op1, target,
7941 unsignedp, OPTAB_LIB_WIDEN);
99e03bd2 7942 hipart = gen_highpart (innermode, temp);
7943 htem = expand_mult_highpart_adjust (innermode, hipart,
7944 op0, op1, hipart,
7945 zextend_p);
7946 if (htem != hipart)
7947 emit_move_insn (hipart, htem);
10b58489 7948 return temp;
7949 }
10f307d9 7950 }
7951 }
33204670 7952 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7953 subtarget, &op0, &op1, 0);
10f307d9 7954 return expand_mult (mode, op0, op1, target, unsignedp);
7955
7956 case TRUNC_DIV_EXPR:
7957 case FLOOR_DIV_EXPR:
7958 case CEIL_DIV_EXPR:
7959 case ROUND_DIV_EXPR:
7960 case EXACT_DIV_EXPR:
a35a63ff 7961 if (modifier == EXPAND_STACK_PARM)
7962 target = 0;
10f307d9 7963 /* Possible optimization: compute the dividend with EXPAND_SUM
 7964		 then if the divisor is constant we can optimize the case
 7965		 where some terms of the dividend have coefficients divisible by it.  */
33204670 7966 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7967 subtarget, &op0, &op1, 0);
10f307d9 7968 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7969
7970 case RDIV_EXPR:
3623b3f6 7971	      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
 7972		 saving an expensive divide.  If not, combine will rebuild the
 7973		 original computation.  */
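      /* For instance, in "x / y + z / y" both divisions become multiplies
	 by a shared 1/y, trading two divides for one divide and two
	 multiplies.  */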
7974 if (flag_unsafe_math_optimizations && optimize && !optimize_size
fe1b1167 7975 && TREE_CODE (type) == REAL_TYPE
3623b3f6 7976 && !real_onep (TREE_OPERAND (exp, 0)))
7977 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7978 build (RDIV_EXPR, type,
7979 build_real (type, dconst1),
7980 TREE_OPERAND (exp, 1))),
5ff01bda 7981 target, tmode, modifier);
ad99e708 7982 this_optab = sdiv_optab;
10f307d9 7983 goto binop;
7984
7985 case TRUNC_MOD_EXPR:
7986 case FLOOR_MOD_EXPR:
7987 case CEIL_MOD_EXPR:
7988 case ROUND_MOD_EXPR:
a35a63ff 7989 if (modifier == EXPAND_STACK_PARM)
7990 target = 0;
33204670 7991 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7992 subtarget, &op0, &op1, 0);
10f307d9 7993 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7994
7995 case FIX_ROUND_EXPR:
7996 case FIX_FLOOR_EXPR:
7997 case FIX_CEIL_EXPR:
7998 abort (); /* Not used for C. */
7999
8000 case FIX_TRUNC_EXPR:
b572011e 8001 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a35a63ff 8002 if (target == 0 || modifier == EXPAND_STACK_PARM)
10f307d9 8003 target = gen_reg_rtx (mode);
8004 expand_fix (target, op0, unsignedp);
8005 return target;
8006
8007 case FLOAT_EXPR:
b572011e 8008 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
a35a63ff 8009 if (target == 0 || modifier == EXPAND_STACK_PARM)
10f307d9 8010 target = gen_reg_rtx (mode);
8011 /* expand_float can't figure out what to do if FROM has VOIDmode.
8012 So give it the correct mode. With -O, cse will optimize this. */
8013 if (GET_MODE (op0) == VOIDmode)
8014 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8015 op0);
8016 expand_float (target, op0,
78a8ed03 8017 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
10f307d9 8018 return target;
8019
8020 case NEGATE_EXPR:
1cb59c60 8021 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
a35a63ff 8022 if (modifier == EXPAND_STACK_PARM)
8023 target = 0;
bec2d490 8024 temp = expand_unop (mode,
ff385626 8025 ! unsignedp && flag_trapv
 8026			  && (GET_MODE_CLASS (mode) == MODE_INT)
8027 ? negv_optab : neg_optab, op0, target, 0);
10f307d9 8028 if (temp == 0)
8029 abort ();
8030 return temp;
8031
8032 case ABS_EXPR:
8033 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
a35a63ff 8034 if (modifier == EXPAND_STACK_PARM)
8035 target = 0;
10f307d9 8036
71d5a758 8037 /* ABS_EXPR is not valid for complex arguments. */
d2ae1b1e 8038 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8039 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
71d5a758 8040 abort ();
5db186f1 8041
10f307d9 8042 /* Unsigned abs is simply the operand. Testing here means we don't
8043 risk generating incorrect code below. */
78a8ed03 8044 if (TYPE_UNSIGNED (type))
10f307d9 8045 return op0;
8046
bec2d490 8047 return expand_abs (mode, op0, target, unsignedp,
997d68fe 8048 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
10f307d9 8049
8050 case MAX_EXPR:
8051 case MIN_EXPR:
8052 target = original_target;
a35a63ff 8053 if (target == 0
8054 || modifier == EXPAND_STACK_PARM
e16ceb8e 8055 || (MEM_P (target) && MEM_VOLATILE_P (target))
d2ae1b1e 8056 || GET_MODE (target) != mode
8ad4c111 8057 || (REG_P (target)
10f307d9 8058 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8059 target = gen_reg_rtx (mode);
33204670 8060 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8061 target, &op0, &op1, 0);
10f307d9 8062
8063 /* First try to do it with a special MIN or MAX instruction.
8064 If that does not win, use a conditional jump to select the proper
8065 value. */
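      /* The fallback emitted further below is, for MAX_EXPR, roughly:
	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:
	 and the mirror image (LE) for MIN_EXPR.  */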
7645fec2 8066 this_optab = (unsignedp
10f307d9 8067 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8068 : (code == MIN_EXPR ? smin_optab : smax_optab));
8069
8070 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8071 OPTAB_WIDEN);
8072 if (temp != 0)
8073 return temp;
8074
446a42ee 8075 /* At this point, a MEM target is no longer useful; we will get better
8076 code without it. */
fa56dc1d 8077
e16ceb8e 8078 if (MEM_P (target))
446a42ee 8079 target = gen_reg_rtx (mode);
8080
a54ebf2e 8081 /* If op1 was placed in target, swap op0 and op1. */
8082 if (target != op0 && target == op1)
8083 {
8084 rtx tem = op0;
8085 op0 = op1;
8086 op1 = tem;
8087 }
8088
1145b168 8089 if (target != op0)
8090 emit_move_insn (target, op0);
d2ae1b1e 8091
10f307d9 8092 op0 = gen_label_rtx ();
d2ae1b1e 8093
228661d1 8094 /* If this mode is an integer too wide to compare properly,
8095 compare word by word. Rely on cse to optimize constant cases. */
a4110d9a 8096 if (GET_MODE_CLASS (mode) == MODE_INT
8097 && ! can_compare_p (GE, mode, ccp_jump))
10f307d9 8098 {
228661d1 8099 if (code == MAX_EXPR)
7645fec2 8100 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8101 NULL_RTX, op0);
10f307d9 8102 else
7645fec2 8103 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8104 NULL_RTX, op0);
10f307d9 8105 }
228661d1 8106 else
8107 {
1a29b174 8108 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7645fec2 8109 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
228661d1 8110 }
1a29b174 8111 emit_move_insn (target, op1);
10f307d9 8112 emit_label (op0);
8113 return target;
8114
10f307d9 8115 case BIT_NOT_EXPR:
8116 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
a35a63ff 8117 if (modifier == EXPAND_STACK_PARM)
8118 target = 0;
10f307d9 8119 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8120 if (temp == 0)
8121 abort ();
8122 return temp;
8123
d2ae1b1e 8124 /* ??? Can optimize bitwise operations with one arg constant.
8125 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8126 and (a bitwise1 b) bitwise2 b (etc)
 8127	     but that is probably not worthwhile.  */
8128
8129 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8130 boolean values when we want in all cases to compute both of them. In
8131 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8132 as actual zero-or-1 values and then bitwise anding. In cases where
8133 there cannot be any side effects, better code would be made by
8134 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8135 how to recognize those cases. */
8136
10f307d9 8137 case TRUTH_AND_EXPR:
8138 case BIT_AND_EXPR:
8139 this_optab = and_optab;
8140 goto binop;
8141
10f307d9 8142 case TRUTH_OR_EXPR:
8143 case BIT_IOR_EXPR:
8144 this_optab = ior_optab;
8145 goto binop;
8146
c43d8fab 8147 case TRUTH_XOR_EXPR:
10f307d9 8148 case BIT_XOR_EXPR:
8149 this_optab = xor_optab;
8150 goto binop;
8151
8152 case LSHIFT_EXPR:
8153 case RSHIFT_EXPR:
8154 case LROTATE_EXPR:
8155 case RROTATE_EXPR:
997d68fe 8156 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
10f307d9 8157 subtarget = 0;
a35a63ff 8158 if (modifier == EXPAND_STACK_PARM)
8159 target = 0;
10f307d9 8160 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8161 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8162 unsignedp);
8163
d2ae1b1e 8164 /* Could determine the answer when only additive constants differ. Also,
8165 the addition of one can be handled by changing the condition. */
10f307d9 8166 case LT_EXPR:
8167 case LE_EXPR:
8168 case GT_EXPR:
8169 case GE_EXPR:
8170 case EQ_EXPR:
8171 case NE_EXPR:
a4110d9a 8172 case UNORDERED_EXPR:
8173 case ORDERED_EXPR:
8174 case UNLT_EXPR:
8175 case UNLE_EXPR:
8176 case UNGT_EXPR:
8177 case UNGE_EXPR:
8178 case UNEQ_EXPR:
318a728f 8179 case LTGT_EXPR:
a35a63ff 8180 temp = do_store_flag (exp,
8181 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8182 tmode != VOIDmode ? tmode : mode, 0);
10f307d9 8183 if (temp != 0)
8184 return temp;
d2ae1b1e 8185
a92771b8 8186 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
10f307d9 8187 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8188 && original_target
8ad4c111 8189 && REG_P (original_target)
10f307d9 8190 && (GET_MODE (original_target)
8191 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8192 {
d2ae1b1e 8193 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8194 VOIDmode, 0);
8195
701cbdec 8196 /* If temp is constant, we can just compute the result. */
8197 if (GET_CODE (temp) == CONST_INT)
8198 {
8199 if (INTVAL (temp) != 0)
8200 emit_move_insn (target, const1_rtx);
8201 else
8202 emit_move_insn (target, const0_rtx);
8203
8204 return target;
8205 }
8206
10f307d9 8207 if (temp != original_target)
701cbdec 8208 {
8209 enum machine_mode mode1 = GET_MODE (temp);
8210 if (mode1 == VOIDmode)
8211 mode1 = tmode != VOIDmode ? tmode : mode;
ff385626 8212
701cbdec 8213 temp = copy_to_mode_reg (mode1, temp);
8214 }
d2ae1b1e 8215
10f307d9 8216 op1 = gen_label_rtx ();
5a894bc6 8217 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
2b96c5f6 8218 GET_MODE (temp), unsignedp, op1);
10f307d9 8219 emit_move_insn (temp, const1_rtx);
8220 emit_label (op1);
8221 return temp;
8222 }
d2ae1b1e 8223
10f307d9 8224 /* If no set-flag instruction, must generate a conditional
8225 store into a temporary variable. Drop through
8226 and handle this like && and ||. */
8227
8228 case TRUTH_ANDIF_EXPR:
8229 case TRUTH_ORIF_EXPR:
34f17b00 8230 if (! ignore
a35a63ff 8231 && (target == 0
8232 || modifier == EXPAND_STACK_PARM
8233 || ! safe_from_p (target, exp, 1)
34f17b00 8234 /* Make sure we don't have a hard reg (such as function's return
8235 value) live across basic blocks, if not optimizing. */
8ad4c111 8236 || (!optimize && REG_P (target)
34f17b00 8237 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
10f307d9 8238 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
34f17b00 8239
8240 if (target)
8241 emit_clr_insn (target);
8242
10f307d9 8243 op1 = gen_label_rtx ();
8244 jumpifnot (exp, op1);
34f17b00 8245
8246 if (target)
8247 emit_0_to_1_insn (target);
8248
10f307d9 8249 emit_label (op1);
34f17b00 8250 return ignore ? const0_rtx : target;
10f307d9 8251
8252 case TRUTH_NOT_EXPR:
a35a63ff 8253 if (modifier == EXPAND_STACK_PARM)
8254 target = 0;
10f307d9 8255 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8256 /* The parser is careful to generate TRUTH_NOT_EXPR
8257 only with operands that are always zero or one. */
b572011e 8258 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
10f307d9 8259 target, 1, OPTAB_LIB_WIDEN);
8260 if (temp == 0)
8261 abort ();
8262 return temp;
8263
8264 case COMPOUND_EXPR:
8265 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8266 emit_queue ();
60ffaf4d 8267 return expand_expr_real (TREE_OPERAND (exp, 1),
8268 (ignore ? const0_rtx : target),
8269 VOIDmode, modifier, alt_rtl);
10f307d9 8270
4ee9c684 8271 case STATEMENT_LIST:
8272 {
8273 tree_stmt_iterator iter;
8274
8275 if (!ignore)
8276 abort ();
8277
8278 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8279 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8280 }
8281 return const0_rtx;
8282
10f307d9 8283 case COND_EXPR:
4ee9c684 8284 /* If it's void, we don't need to worry about computing a value. */
8285 if (VOID_TYPE_P (TREE_TYPE (exp)))
8286 {
8287 tree pred = TREE_OPERAND (exp, 0);
8288 tree then_ = TREE_OPERAND (exp, 1);
8289 tree else_ = TREE_OPERAND (exp, 2);
8290
8291 /* If we do not have any pending cleanups or stack_levels
8292 to restore, and at least one arm of the COND_EXPR is a
8293 GOTO_EXPR to a local label, then we can emit more efficient
8294 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8295 if (! optimize
8296 || containing_blocks_have_cleanups_or_stack_level ())
8297 ;
8298 else if (TREE_CODE (then_) == GOTO_EXPR
8299 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8300 {
8301 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8302 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8303 }
8304 else if (TREE_CODE (else_) == GOTO_EXPR
8305 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8306 {
8307 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8308 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8309 }
8310
8311 /* Just use the 'if' machinery. */
8312 expand_start_cond (pred, 0);
8313 start_cleanup_deferral ();
8314 expand_expr (then_, const0_rtx, VOIDmode, 0);
8315
8316 exp = else_;
8317
8318 /* Iterate over 'else if's instead of recursing. */
8319 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8320 {
8321 expand_start_else ();
8322 if (EXPR_HAS_LOCATION (exp))
8323 {
8324 emit_line_note (EXPR_LOCATION (exp));
32a2193a 8325 record_block_change (TREE_BLOCK (exp));
4ee9c684 8326 }
8327 expand_elseif (TREE_OPERAND (exp, 0));
8328 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8329 }
8330 /* Don't emit the jump and label if there's no 'else' clause. */
8331 if (TREE_SIDE_EFFECTS (exp))
8332 {
8333 expand_start_else ();
8334 expand_expr (exp, const0_rtx, VOIDmode, 0);
8335 }
8336 end_cleanup_deferral ();
8337 expand_end_cond ();
8338 return const0_rtx;
8339 }
8340
4035eace 8341 /* If we would have a "singleton" (see below) were it not for a
8342 conversion in each arm, bring that conversion back out. */
8343 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8344 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8345 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8346 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8347 {
9c811526 8348 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8349 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8350
8351 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8352 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8353 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8354 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8355 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8356 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8357 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8358 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
4035eace 8359 return expand_expr (build1 (NOP_EXPR, type,
9c811526 8360 build (COND_EXPR, TREE_TYPE (iftrue),
4035eace 8361 TREE_OPERAND (exp, 0),
9c811526 8362 iftrue, iffalse)),
4035eace 8363 target, tmode, modifier);
8364 }
8365
10f307d9 8366 {
8367 /* Note that COND_EXPRs whose type is a structure or union
8368 are required to be constructed to contain assignments of
8369 a temporary variable, so that we can evaluate them here
8370 for side effect only. If type is void, we must do likewise. */
8371
8372 /* If an arm of the branch requires a cleanup,
8373 only that cleanup is performed. */
8374
8375 tree singleton = 0;
8376 tree binary_op = 0, unary_op = 0;
10f307d9 8377
8378 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8379 convert it to our mode, if necessary. */
8380 if (integer_onep (TREE_OPERAND (exp, 1))
8381 && integer_zerop (TREE_OPERAND (exp, 2))
8382 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8383 {
f75fb6ae 8384 if (ignore)
8385 {
8386 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8a06f2d4 8387 modifier);
f75fb6ae 8388 return const0_rtx;
8389 }
8390
a35a63ff 8391 if (modifier == EXPAND_STACK_PARM)
8392 target = 0;
8a06f2d4 8393 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
10f307d9 8394 if (GET_MODE (op0) == mode)
8395 return op0;
d2ae1b1e 8396
10f307d9 8397 if (target == 0)
8398 target = gen_reg_rtx (mode);
8399 convert_move (target, op0, unsignedp);
8400 return target;
8401 }
8402
4035eace 8403 /* Check for X ? A + B : A. If we have this, we can copy A to the
8404 output and conditionally add B. Similarly for unary operations.
8405 Don't do this if X has side-effects because those side effects
8406 might affect A or B and the "?" operation is a sequence point in
8407 ANSI. (operand_equal_p tests for side effects.) */
10f307d9 8408
8409 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8410 && operand_equal_p (TREE_OPERAND (exp, 2),
8411 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8412 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8413 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8414 && operand_equal_p (TREE_OPERAND (exp, 1),
8415 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8416 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8417 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8418 && operand_equal_p (TREE_OPERAND (exp, 2),
8419 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8420 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8421 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8422 && operand_equal_p (TREE_OPERAND (exp, 1),
8423 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8424 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8425
46e62598 8426 /* If we are not to produce a result, we have no target. Otherwise,
8427 if a target was specified use it; it will not be used as an
fa56dc1d 8428 intermediate target unless it is safe. If no target, use a
46e62598 8429 temporary. */
8430
8431 if (ignore)
8432 temp = 0;
a35a63ff 8433 else if (modifier == EXPAND_STACK_PARM)
8434 temp = assign_temp (type, 0, 0, 1);
46e62598 8435 else if (original_target
997d68fe 8436 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8ad4c111 8437 || (singleton && REG_P (original_target)
46e62598 8438 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8439 && original_target == var_rtx (singleton)))
8440 && GET_MODE (original_target) == mode
e500743d 8441#ifdef HAVE_conditional_move
8442 && (! can_conditionally_move_p (mode)
8ad4c111 8443 || REG_P (original_target)
e500743d 8444 || TREE_ADDRESSABLE (type))
8445#endif
e16ceb8e 8446 && (!MEM_P (original_target)
9c0e6d90 8447 || TREE_ADDRESSABLE (type)))
46e62598 8448 temp = original_target;
8449 else if (TREE_ADDRESSABLE (type))
8450 abort ();
8451 else
8452 temp = assign_temp (type, 0, 0, 1);
8453
4035eace 8454 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8455 do the test of X as a store-flag operation, do this as
8456 A + ((X != 0) << log C). Similarly for other simple binary
8457 operators. Only do for C == 1 if BRANCH_COST is low. */
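	/* For instance, "x ? a + 8 : a" becomes "a + ((x != 0) << 3)",
	   replacing a branch with a store-flag and a shift.  */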
f75fb6ae 8458 if (temp && singleton && binary_op
10f307d9 8459 && (TREE_CODE (binary_op) == PLUS_EXPR
8460 || TREE_CODE (binary_op) == MINUS_EXPR
8461 || TREE_CODE (binary_op) == BIT_IOR_EXPR
b35321d6 8462 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
4035eace 8463 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8464 : integer_onep (TREE_OPERAND (binary_op, 1)))
10f307d9 8465 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8466 {
8467 rtx result;
b4f00eec 8468 tree cond;
bec2d490 8469 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
ff385626 8470 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8471 ? addv_optab : add_optab)
8472 : TREE_CODE (binary_op) == MINUS_EXPR
8473 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8474 ? subv_optab : sub_optab)
8475 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8476 : xor_optab);
10f307d9 8477
b4f00eec 8478 /* If we had X ? A : A + 1, do this as A + (X == 0). */
10f307d9 8479 if (singleton == TREE_OPERAND (exp, 1))
b4f00eec 8480 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8481 else
8482 cond = TREE_OPERAND (exp, 0);
10f307d9 8483
b4f00eec 8484 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8485 ? temp : NULL_RTX),
10f307d9 8486 mode, BRANCH_COST <= 1);
8487
4035eace 8488 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8489 result = expand_shift (LSHIFT_EXPR, mode, result,
8490 build_int_2 (tree_log2
8491 (TREE_OPERAND
8492 (binary_op, 1)),
8493 0),
997d68fe 8494 (safe_from_p (temp, singleton, 1)
4035eace 8495 ? temp : NULL_RTX), 0);
8496
10f307d9 8497 if (result)
8498 {
b572011e 8499 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
10f307d9 8500 return expand_binop (mode, boptab, op1, result, temp,
8501 unsignedp, OPTAB_LIB_WIDEN);
8502 }
10f307d9 8503 }
fa56dc1d 8504
d07f1b1f 8505 do_pending_stack_adjust ();
10f307d9 8506 NO_DEFER_POP;
8507 op0 = gen_label_rtx ();
8508
8509 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8510 {
8511 if (temp != 0)
8512 {
8513 /* If the target conflicts with the other operand of the
8514 binary op, we can't use it. Also, we can't use the target
8515 if it is a hard register, because evaluating the condition
8516 might clobber it. */
8517 if ((binary_op
997d68fe 8518 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8ad4c111 8519 || (REG_P (temp)
10f307d9 8520 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8521 temp = gen_reg_rtx (mode);
a35a63ff 8522 store_expr (singleton, temp,
8523 modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8524 }
8525 else
b572011e 8526 expand_expr (singleton,
637e11f7 8527 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
10f307d9 8528 if (singleton == TREE_OPERAND (exp, 1))
8529 jumpif (TREE_OPERAND (exp, 0), op0);
8530 else
8531 jumpifnot (TREE_OPERAND (exp, 0), op0);
8532
ad87de1e 8533 start_cleanup_deferral ();
10f307d9 8534 if (binary_op && temp == 0)
8535 /* Just touch the other operand. */
8536 expand_expr (TREE_OPERAND (binary_op, 1),
b572011e 8537 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
10f307d9 8538 else if (binary_op)
8539 store_expr (build (TREE_CODE (binary_op), type,
8540 make_tree (type, temp),
8541 TREE_OPERAND (binary_op, 1)),
a35a63ff 8542 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8543 else
8544 store_expr (build1 (TREE_CODE (unary_op), type,
8545 make_tree (type, temp)),
a35a63ff 8546 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8547 op1 = op0;
10f307d9 8548 }
10f307d9 8549 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8550 comparison operator. If we have one of these cases, set the
8551 output to A, branch on A (cse will merge these two references),
8552 then set the output to FOO. */
8553 else if (temp
8554 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8555 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8556 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8557 TREE_OPERAND (exp, 1), 0)
0dbd1c74 8558 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8559 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
997d68fe 8560 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
10f307d9 8561 {
8ad4c111 8562 if (REG_P (temp)
fa56dc1d 8563 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
10f307d9 8564 temp = gen_reg_rtx (mode);
a35a63ff 8565 store_expr (TREE_OPERAND (exp, 1), temp,
8566 modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8567 jumpif (TREE_OPERAND (exp, 0), op0);
34e2ddcd 8568
ad87de1e 8569 start_cleanup_deferral ();
64cfb0d0 8570 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8571 store_expr (TREE_OPERAND (exp, 2), temp,
8572 modifier == EXPAND_STACK_PARM ? 2 : 0);
8573 else
8574 expand_expr (TREE_OPERAND (exp, 2),
8575 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
10f307d9 8576 op1 = op0;
8577 }
8578 else if (temp
8579 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8580 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8581 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8582 TREE_OPERAND (exp, 2), 0)
0dbd1c74 8583 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8584 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
997d68fe 8585 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
10f307d9 8586 {
8ad4c111 8587 if (REG_P (temp)
fa56dc1d 8588 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
10f307d9 8589 temp = gen_reg_rtx (mode);
a35a63ff 8590 store_expr (TREE_OPERAND (exp, 2), temp,
8591 modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8592 jumpifnot (TREE_OPERAND (exp, 0), op0);
34e2ddcd 8593
ad87de1e 8594 start_cleanup_deferral ();
64cfb0d0 8595 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8596 store_expr (TREE_OPERAND (exp, 1), temp,
8597 modifier == EXPAND_STACK_PARM ? 2 : 0);
8598 else
8599 expand_expr (TREE_OPERAND (exp, 1),
8600 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
10f307d9 8601 op1 = op0;
8602 }
8603 else
8604 {
8605 op1 = gen_label_rtx ();
8606 jumpifnot (TREE_OPERAND (exp, 0), op0);
34e2ddcd 8607
ad87de1e 8608 start_cleanup_deferral ();
fa56dc1d 8609
d5b495d7 8610 /* One branch of the cond can be void, if it never returns. For
fa56dc1d 8611		 example A ? throw : E.  */
d5b495d7 8612 if (temp != 0
fa56dc1d 8613 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
a35a63ff 8614 store_expr (TREE_OPERAND (exp, 1), temp,
8615 modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8616 else
b572011e 8617 expand_expr (TREE_OPERAND (exp, 1),
8618 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
ad87de1e 8619 end_cleanup_deferral ();
10f307d9 8620 emit_queue ();
8621 emit_jump_insn (gen_jump (op1));
8622 emit_barrier ();
8623 emit_label (op0);
ad87de1e 8624 start_cleanup_deferral ();
d5b495d7 8625 if (temp != 0
fa56dc1d 8626 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
a35a63ff 8627 store_expr (TREE_OPERAND (exp, 2), temp,
8628 modifier == EXPAND_STACK_PARM ? 2 : 0);
10f307d9 8629 else
b572011e 8630 expand_expr (TREE_OPERAND (exp, 2),
8631 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
10f307d9 8632 }
8633
ad87de1e 8634 end_cleanup_deferral ();
10f307d9 8635
8636 emit_queue ();
8637 emit_label (op1);
8638 OK_DEFER_POP;
34e2ddcd 8639
10f307d9 8640 return temp;
8641 }
8642
8643 case TARGET_EXPR:
8644 {
8645 /* Something needs to be initialized, but we didn't know
8646 where that thing was when building the tree. For example,
8647 it could be the return value of a function, or a parameter
 8648	     to a function which is constructed on the stack, or a temporary
8649 variable which must be passed by reference.
8650
8651 We guarantee that the expression will either be constructed
8652 or copied into our original target. */
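	/* For instance, for "struct S s = f ();" the call to f can
	   construct its return value directly in the storage for "s",
	   avoiding an intermediate copy.  */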
8653
8654 tree slot = TREE_OPERAND (exp, 0);
55e5e99d 8655 tree cleanups = NULL_TREE;
382a84fe 8656 tree exp1;
10f307d9 8657
8658 if (TREE_CODE (slot) != VAR_DECL)
8659 abort ();
8660
4ebbf7e7 8661 if (! ignore)
8662 target = original_target;
8663
813a136d 8664 /* Set this here so that if we get a target that refers to a
8665 register variable that's already been used, put_reg_into_stack
fa56dc1d 8666 knows that it should fix up those uses. */
813a136d 8667 TREE_USED (slot) = 1;
8668
10f307d9 8669 if (target == 0)
8670 {
0e8e37b2 8671 if (DECL_RTL_SET_P (slot))
254deafd 8672 {
8673 target = DECL_RTL (slot);
382a84fe 8674		/* If we have already expanded the slot, don't do
254deafd 8675 it again. (mrs) */
382a84fe 8676 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8677 return target;
254deafd 8678 }
10f307d9 8679 else
8680 {
0dbd1c74 8681 target = assign_temp (type, 2, 0, 1);
0e8e37b2 8682 SET_DECL_RTL (slot, target);
10f307d9 8683
52c30396 8684 /* Since SLOT is not known to the called function
8685 to belong to its stack frame, we must build an explicit
8686 cleanup. This case occurs when we must build up a reference
8687 to pass the reference as an argument. In this case,
8688 it is very likely that such a reference need not be
8689 built here. */
8690
8691 if (TREE_OPERAND (exp, 2) == 0)
04745efb 8692 TREE_OPERAND (exp, 2)
dc24ddbd 8693 = lang_hooks.maybe_build_cleanup (slot);
55e5e99d 8694 cleanups = TREE_OPERAND (exp, 2);
52c30396 8695 }
10f307d9 8696 }
8697 else
8698 {
8699 /* This case does occur, when expanding a parameter which
8700 needs to be constructed on the stack. The target
8701 is the actual stack address that we want to initialize.
8702 The function we call will perform the cleanup in this case. */
8703
5bf72397 8704 /* If we have already assigned it space, use that space,
8705 not target that we were passed in, as our target
8706 parameter is only a hint. */
0e8e37b2 8707 if (DECL_RTL_SET_P (slot))
fa56dc1d 8708 {
8709 target = DECL_RTL (slot);
 8710		  /* If we have already expanded the slot, don't do
5bf72397 8711 it again. (mrs) */
fa56dc1d 8712 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8713 return target;
5bf72397 8714 }
4d22520d 8715 else
e8825bb0 8716 SET_DECL_RTL (slot, target);
10f307d9 8717 }
8718
836b9503 8719 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
382a84fe 8720 /* Mark it as expanded. */
8721 TREE_OPERAND (exp, 1) = NULL_TREE;
8722
bbdcc797 8723 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8724 /* If the initializer is void, just expand it; it will initialize
8725 the object directly. */
8726 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8727 else
8728 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
5f019534 8729
a9bc793b 8730 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
fa56dc1d 8731
483f9fca 8732 return target;
10f307d9 8733 }
8734
8735 case INIT_EXPR:
8736 {
8737 tree lhs = TREE_OPERAND (exp, 0);
8738 tree rhs = TREE_OPERAND (exp, 1);
10f307d9 8739
725cd5ad 8740 temp = expand_assignment (lhs, rhs, ! ignore);
10f307d9 8741 return temp;
8742 }
8743
8744 case MODIFY_EXPR:
8745 {
8746 /* If lhs is complex, expand calls in rhs before computing it.
8cbe13ee 8747 That's so we don't compute a pointer and save it over a
8748 call. If lhs is simple, compute it first so we can give it
8749 as a target if the rhs is just a call. This avoids an
 8750	   extra temp and copy, and prevents a partial-subsumption
 8751	   that makes bad code.  Actually we could treat
8752 component_ref's of vars like vars. */
10f307d9 8753
8754 tree lhs = TREE_OPERAND (exp, 0);
8755 tree rhs = TREE_OPERAND (exp, 1);
10f307d9 8756
8757 temp = 0;
8758
10f307d9 8759 /* Check for |= or &= of a bitfield of size one into another bitfield
8760 of size 1. In this case, (unless we need the result of the
8761 assignment) we can do this more efficiently with a
8762 test followed by an assignment, if necessary.
8763
8764 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8765 things change so we do, this code should be enhanced to
8766 support it. */
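      /* For instance, "s.a |= s.b" (both one-bit fields) becomes
	 "if (s.b) s.a = 1;" and "s.a &= s.b" becomes
	 "if (!s.b) s.a = 0;".  */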
8767 if (ignore
8768 && TREE_CODE (lhs) == COMPONENT_REF
8769 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8770 || TREE_CODE (rhs) == BIT_AND_EXPR)
8771 && TREE_OPERAND (rhs, 0) == lhs
8772 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
a0c2c45b 8773 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8774 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10f307d9 8775 {
8776 rtx label = gen_label_rtx ();
8777
8778 do_jump (TREE_OPERAND (rhs, 1),
8779 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8780 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8781 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8782 (TREE_CODE (rhs) == BIT_IOR_EXPR
8783 ? integer_one_node
8784 : integer_zero_node)),
725cd5ad 8785 0);
01ab6370 8786 do_pending_stack_adjust ();
10f307d9 8787 emit_label (label);
8788 return const0_rtx;
8789 }
8790
725cd5ad 8791 temp = expand_assignment (lhs, rhs, ! ignore);
ff385626 8792
10f307d9 8793 return temp;
8794 }
8795
2be2ba3c 8796 case RETURN_EXPR:
8797 if (!TREE_OPERAND (exp, 0))
8798 expand_null_return ();
8799 else
8800 expand_return (TREE_OPERAND (exp, 0));
8801 return const0_rtx;
8802
10f307d9 8803 case PREINCREMENT_EXPR:
8804 case PREDECREMENT_EXPR:
37e76d7d 8805 return expand_increment (exp, 0, ignore);
10f307d9 8806
8807 case POSTINCREMENT_EXPR:
8808 case POSTDECREMENT_EXPR:
8809 /* Faster to treat as pre-increment if result is not used. */
37e76d7d 8810 return expand_increment (exp, ! ignore, ignore);
10f307d9 8811
8812 case ADDR_EXPR:
a35a63ff 8813 if (modifier == EXPAND_STACK_PARM)
8814 target = 0;
fbd92dcf 8815 /* If we are taking the address of something erroneous, just
8816 return a zero. */
4ee9c684 8817 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
fbd92dcf 8818 return const0_rtx;
606a59ec 8819 /* If we are taking the address of a constant and are at the
8820 top level, we have to use output_constant_def since we can't
8821 call force_const_mem at top level. */
8822 else if (cfun == 0
8823 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8824 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8825 == 'c')))
8826 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
10f307d9 8827 else
8828 {
52c30396 8829 /* We make sure to pass const0_rtx down if we came in with
8830 ignore set, to avoid doing the cleanups twice for something. */
8831 op0 = expand_expr (TREE_OPERAND (exp, 0),
8832 ignore ? const0_rtx : NULL_RTX, VOIDmode,
10f307d9 8833 (modifier == EXPAND_INITIALIZER
8834 ? modifier : EXPAND_CONST_ADDRESS));
3f9387ea 8835
f17fe086 8836 /* If we are going to ignore the result, OP0 will have been set
8837 to const0_rtx, so just return it. Don't get confused and
8838 think we are taking the address of the constant. */
8839 if (ignore)
8840 return op0;
8841
f9636a66 8842 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
 8843	     clever and return a REG when given a MEM.  */
8844 op0 = protect_from_queue (op0, 1);
d0bb4e31 8845
7014838c 8846 /* We would like the object in memory. If it is a constant, we can
8847 have it be statically allocated into memory. For a non-constant,
8848 we need to allocate some memory and store the value into it. */
3f9387ea 8849
8850 if (CONSTANT_P (op0))
8851 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8852 op0);
8ad4c111 8853 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
e8825bb0 8854 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8855 || GET_CODE (op0) == LO_SUM)
3f9387ea 8856 {
67c155cb 8857 /* If this object is in a register, it can't be BLKmode. */
8858 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8859 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8860
8861 if (GET_CODE (op0) == PARALLEL)
8862 /* Handle calls that pass values in multiple
8863 non-contiguous locations. The Irix 6 ABI has examples
8864 of this. */
8865 emit_group_store (memloc, op0, inner_type,
8866 int_size_in_bytes (inner_type));
ddf4ad75 8867 else
67c155cb 8868 emit_move_insn (memloc, op0);
ff385626 8869
67c155cb 8870 op0 = memloc;
3f9387ea 8871 }
8872
e16ceb8e 8873 if (!MEM_P (op0))
10f307d9 8874 abort ();
fa56dc1d 8875
3084721c 8876 mark_temp_addr_taken (op0);
10f307d9 8877 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
ed8d3eee 8878 {
3084721c 8879 op0 = XEXP (op0, 0);
85d654dd 8880 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
3084721c 8881 op0 = convert_memory_address (ptr_mode, op0);
3084721c 8882 return op0;
ed8d3eee 8883 }
4aa0b850 8884
1ed6227f 8885 /* If OP0 is not aligned as least as much as the type requires, we
8886 need to make a temporary, copy OP0 to it, and take the address of
8887 the temporary. We want to use the alignment of the type, not of
8888 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8889 the test for BLKmode means that can't happen. The test for
8890 BLKmode is because we never make mis-aligned MEMs with
8891 non-BLKmode.
8892
8893 We don't need to do this at all if the machine doesn't have
8894 strict alignment. */
8895 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8896 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
f96c43fb 8897 > MEM_ALIGN (op0))
8898 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
2b96c5f6 8899 {
8900 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
dbdac186 8901 rtx new;
2b96c5f6 8902
a9d9ab08 8903 if (TYPE_ALIGN_OK (inner_type))
8904 abort ();
8905
dbdac186 8906 if (TREE_ADDRESSABLE (inner_type))
8907 {
8908 /* We can't make a bitwise copy of this object, so fail. */
8909 error ("cannot take the address of an unaligned member");
8910 return const0_rtx;
8911 }
8912
8913 new = assign_stack_temp_for_type
8914 (TYPE_MODE (inner_type),
8915 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8916 : int_size_in_bytes (inner_type),
8917 1, build_qualified_type (inner_type,
8918 (TYPE_QUALS (inner_type)
8919 | TYPE_QUAL_CONST)));
8920
0378dbdc 8921 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
a35a63ff 8922 (modifier == EXPAND_STACK_PARM
8923 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
dbdac186 8924
2b96c5f6 8925 op0 = new;
8926 }
8927
10f307d9 8928 op0 = force_operand (XEXP (op0, 0), target);
8929 }
4aa0b850 8930
cc84ca71 8931 if (flag_force_addr
8ad4c111 8932 && !REG_P (op0)
cc84ca71 8933 && modifier != EXPAND_CONST_ADDRESS
8934 && modifier != EXPAND_INITIALIZER
8935 && modifier != EXPAND_SUM)
4aa0b850 8936 op0 = force_reg (Pmode, op0);
8937
8ad4c111 8938 if (REG_P (op0)
6e6b4174 8939 && ! REG_USERVAR_P (op0))
80909c64 8940 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
4aa0b850 8941
85d654dd 8942 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
d621c33d 8943 op0 = convert_memory_address (ptr_mode, op0);
ed8d3eee 8944
10f307d9 8945 return op0;
8946
8947 case ENTRY_VALUE_EXPR:
8948 abort ();
8949
b63679d2 8950 /* COMPLEX type for Extended Pascal & Fortran */
8951 case COMPLEX_EXPR:
8952 {
8953 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
dd0d17cd 8954 rtx insns;
b63679d2 8955
8956 /* Get the rtx code of the operands. */
8957 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8958 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8959
8960 if (! target)
8961 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8962
dd0d17cd 8963 start_sequence ();
b63679d2 8964
8965 /* Move the real (op0) and imaginary (op1) parts to their location. */
5db186f1 8966 emit_move_insn (gen_realpart (mode, target), op0);
8967 emit_move_insn (gen_imagpart (mode, target), op1);
b63679d2 8968
dd0d17cd 8969 insns = get_insns ();
8970 end_sequence ();
8971
b63679d2 8972 /* Complex construction should appear as a single unit. */
dd0d17cd 8973 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8974 each with a separate pseudo as destination.
8975 It's not correct for flow to treat them as a unit. */
8ba8185c 8976 if (GET_CODE (target) != CONCAT)
dd0d17cd 8977 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8978 else
31d3e01c 8979 emit_insn (insns);
b63679d2 8980
8981 return target;
8982 }
8983
8984 case REALPART_EXPR:
5db186f1 8985 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8986 return gen_realpart (mode, op0);
fa56dc1d 8987
b63679d2 8988 case IMAGPART_EXPR:
5db186f1 8989 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8990 return gen_imagpart (mode, op0);
b63679d2 8991
8992 case CONJ_EXPR:
8993 {
1b77ec5f 8994 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
b63679d2 8995 rtx imag_t;
dd0d17cd 8996 rtx insns;
fa56dc1d 8997
8998 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
b63679d2 8999
9000 if (! target)
d2ae1b1e 9001 target = gen_reg_rtx (mode);
fa56dc1d 9002
dd0d17cd 9003 start_sequence ();
b63679d2 9004
9005 /* Store the realpart and the negated imagpart to target. */
1b77ec5f 9006 emit_move_insn (gen_realpart (partmode, target),
9007 gen_realpart (partmode, op0));
b63679d2 9008
1b77ec5f 9009 imag_t = gen_imagpart (partmode, target);
bec2d490 9010 temp = expand_unop (partmode,
ff385626 9011 ! unsignedp && flag_trapv
 9012			  && (GET_MODE_CLASS (partmode) == MODE_INT)
9013 ? negv_optab : neg_optab,
fa56dc1d 9014 gen_imagpart (partmode, op0), imag_t, 0);
b63679d2 9015 if (temp != imag_t)
9016 emit_move_insn (imag_t, temp);
9017
dd0d17cd 9018 insns = get_insns ();
9019 end_sequence ();
9020
fa56dc1d 9021	    /* Conjugate should appear as a single unit.
d2ae1b1e 9022 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
dd0d17cd 9023 each with a separate pseudo as destination.
9024 It's not correct for flow to treat them as a unit. */
8ba8185c 9025 if (GET_CODE (target) != CONCAT)
dd0d17cd 9026 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9027 else
31d3e01c 9028 emit_insn (insns);
b63679d2 9029
9030 return target;
9031 }
9032
4ee9c684 9033 case RESX_EXPR:
9034 expand_resx_expr (exp);
9035 return const0_rtx;
9036
694ec519 9037 case TRY_CATCH_EXPR:
9038 {
9039 tree handler = TREE_OPERAND (exp, 1);
9040
9041 expand_eh_region_start ();
694ec519 9042 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
4ee9c684 9043 expand_eh_handler (handler);
694ec519 9044
9045 return op0;
9046 }
9047
4ee9c684 9048 case CATCH_EXPR:
9049 expand_start_catch (CATCH_TYPES (exp));
9050 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9051 expand_end_catch ();
9052 return const0_rtx;
9053
9054 case EH_FILTER_EXPR:
9055 /* Should have been handled in expand_eh_handler. */
9056 abort ();
9057
f0c211a3 9058 case TRY_FINALLY_EXPR:
9059 {
9060 tree try_block = TREE_OPERAND (exp, 0);
9061 tree finally_block = TREE_OPERAND (exp, 1);
f0c211a3 9062
4ee9c684 9063 if ((!optimize && lang_protect_cleanup_actions == NULL)
9064 || unsafe_for_reeval (finally_block) > 1)
6fc87840 9065 {
9066 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9067 is not sufficient, so we cannot expand the block twice.
9068 So we play games with GOTO_SUBROUTINE_EXPR to let us
9069 expand the thing only once. */
e62d4117 9070 /* When not optimizing, we go ahead with this form since
9071 (1) user breakpoints operate more predictably without
9072 code duplication, and
9073 (2) we're not running any of the global optimizers
9074 that would explode in time/space with the highly
9075 connected CFG created by the indirect branching. */
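	    /* The code emitted below is shaped roughly as:
		   <try block>
		   call the finally "subroutine" (the registered cleanup)
		   goto done;
		 finally:
		   <finally block>
		   jump *RETURN_LINK;
		 done:
	       so the finally block is expanded only once.  */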
6fc87840 9076
9077 rtx finally_label = gen_label_rtx ();
9078 rtx done_label = gen_label_rtx ();
9079 rtx return_link = gen_reg_rtx (Pmode);
9080 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9081 (tree) finally_label, (tree) return_link);
9082 TREE_SIDE_EFFECTS (cleanup) = 1;
9083
9084 /* Start a new binding layer that will keep track of all cleanup
9085 actions to be performed. */
9086 expand_start_bindings (2);
9087 target_temp_slot_level = temp_slot_level;
9088
9089 expand_decl_cleanup (NULL_TREE, cleanup);
9090 op0 = expand_expr (try_block, target, tmode, modifier);
9091
9092 preserve_temp_slots (op0);
9093 expand_end_bindings (NULL_TREE, 0, 0);
9094 emit_jump (done_label);
9095 emit_label (finally_label);
9096 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9097 emit_indirect_jump (return_link);
9098 emit_label (done_label);
9099 }
9100 else
9101 {
9102 expand_start_bindings (2);
9103 target_temp_slot_level = temp_slot_level;
f0c211a3 9104
6fc87840 9105 expand_decl_cleanup (NULL_TREE, finally_block);
9106 op0 = expand_expr (try_block, target, tmode, modifier);
f0c211a3 9107
6fc87840 9108 preserve_temp_slots (op0);
9109 expand_end_bindings (NULL_TREE, 0, 0);
9110 }
f0c211a3 9111
f0c211a3 9112 return op0;
9113 }
9114
fa56dc1d 9115 case GOTO_SUBROUTINE_EXPR:
f0c211a3 9116 {
9117 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9118 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9119 rtx return_address = gen_label_rtx ();
fa56dc1d 9120 emit_move_insn (return_link,
9121 gen_rtx_LABEL_REF (Pmode, return_address));
f0c211a3 9122 emit_jump (subr);
9123 emit_label (return_address);
9124 return const0_rtx;
9125 }
9126
a66c9326 9127 case VA_ARG_EXPR:
9128 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9129
df4b504c 9130 case EXC_PTR_EXPR:
572fdaa3 9131 return get_exception_pointer (cfun);
df4b504c 9132
4ee9c684 9133 case FILTER_EXPR:
9134 return get_exception_filter (cfun);
9135
6bfa2cc1 9136 case FDESC_EXPR:
9137 /* Function descriptors are not valid except for as
9138 initialization constants, and should not be expanded. */
9139 abort ();
9140
4ee9c684 9141 case SWITCH_EXPR:
9142 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9143 "switch");
9144 if (SWITCH_BODY (exp))
9145 expand_expr_stmt (SWITCH_BODY (exp));
9146 if (SWITCH_LABELS (exp))
9147 {
9148 tree duplicate = 0;
9149 tree vec = SWITCH_LABELS (exp);
9150 size_t i, n = TREE_VEC_LENGTH (vec);
9151
9152 for (i = 0; i < n; ++i)
9153 {
9154 tree elt = TREE_VEC_ELT (vec, i);
9155 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9156 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9157 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9158
9159 tree case_low = CASE_LOW (elt);
9160 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9161 if (case_low && case_high)
9162 {
9163 /* Case label is less than minimum for type. */
72e31389 9164 if (TREE_CODE (min_value) == INTEGER_CST
9165 && tree_int_cst_compare (case_low, min_value) < 0
9166 && tree_int_cst_compare (case_high, min_value) < 0)
4ee9c684 9167 {
9168 warning ("case label value %d is less than minimum value for type",
 9169				 (int) TREE_INT_CST_LOW (case_low));
9170 continue;
9171 }
9172
9173 /* Case value is greater than maximum for type. */
72e31389 9174 if (TREE_CODE (max_value) == INTEGER_CST
9175 && tree_int_cst_compare (case_low, max_value) > 0
9176 && tree_int_cst_compare (case_high, max_value) > 0)
4ee9c684 9177 {
9178 warning ("case label value %d exceeds maximum value for type",
 9179				 (int) TREE_INT_CST_LOW (case_high));
9180 continue;
9181 }
9182
9183 /* Saturate lower case label value to minimum. */
72e31389 9184 if (TREE_CODE (min_value) == INTEGER_CST
9185 && tree_int_cst_compare (case_high, min_value) >= 0
9186 && tree_int_cst_compare (case_low, min_value) < 0)
4ee9c684 9187 {
9188 warning ("lower value %d in case label range less than minimum value for type",
 9189				 (int) TREE_INT_CST_LOW (case_low));
9190 case_low = min_value;
9191 }
9192
9193 /* Saturate upper case label value to maximum. */
72e31389 9194 if (TREE_CODE (max_value) == INTEGER_CST
9195 && tree_int_cst_compare (case_low, max_value) <= 0
9196 && tree_int_cst_compare (case_high, max_value) > 0)
4ee9c684 9197 {
9198 warning ("upper value %d in case label range exceeds maximum value for type",
 9199				 (int) TREE_INT_CST_LOW (case_high));
9200 case_high = max_value;
9201 }
9202 }
9203
9204 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9205 if (duplicate)
9206 abort ();
9207 }
9208 }
9209 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9210 return const0_rtx;
9211
9212 case LABEL_EXPR:
9213 expand_label (TREE_OPERAND (exp, 0));
9214 return const0_rtx;
9215
9216 case CASE_LABEL_EXPR:
9217 {
9218 tree duplicate = 0;
9219 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9220 &duplicate, false);
9221 if (duplicate)
9222 abort ();
9223 return const0_rtx;
9224 }
9225
9226 case ASM_EXPR:
9227 expand_asm_expr (exp);
9228 return const0_rtx;
9229
10f307d9 9230 default:
5135beeb 9231 return lang_hooks.expand_expr (exp, original_target, tmode,
9232 modifier, alt_rtl);
10f307d9 9233 }
9234
9235 /* Here to do an ordinary binary operator, generating an instruction
9236 from the optab already placed in `this_optab'. */
9237 binop:
33204670 9238 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9239 subtarget, &op0, &op1, 0);
10f307d9 9240 binop2:
a35a63ff 9241 if (modifier == EXPAND_STACK_PARM)
9242 target = 0;
10f307d9 9243 temp = expand_binop (mode, this_optab, op0, op1, target,
9244 unsignedp, OPTAB_LIB_WIDEN);
9245 if (temp == 0)
9246 abort ();
9247 return temp;
9248}
b54842d8 9249\f
67c68e45 9250/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9251 when applied to the address of EXP produces an address known to be
9252 aligned more than BIGGEST_ALIGNMENT. */
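/* The form recognized is roughly (- (char *) &EXP) & (ALIGN - 1), where
   ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT and each
   subexpression may be wrapped in conversions.  */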
9253
9254static int
35cb5232 9255is_aligning_offset (tree offset, tree exp)
67c68e45 9256{
55f9d7dc 9257 /* Strip off any conversions. */
67c68e45 9258 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9259 || TREE_CODE (offset) == NOP_EXPR
55f9d7dc 9260 || TREE_CODE (offset) == CONVERT_EXPR)
67c68e45 9261 offset = TREE_OPERAND (offset, 0);
9262
 9263	  /* We must now have a BIT_AND_EXPR with a constant that is one less
 9264	     than a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9265 if (TREE_CODE (offset) != BIT_AND_EXPR
9266 || !host_integerp (TREE_OPERAND (offset, 1), 1)
6be0ba7c 9267 || compare_tree_int (TREE_OPERAND (offset, 1),
9268 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
67c68e45 9269	      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9270 return 0;
9271
9272 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9273 It must be NEGATE_EXPR. Then strip any more conversions. */
9274 offset = TREE_OPERAND (offset, 0);
9275 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9276 || TREE_CODE (offset) == NOP_EXPR
9277 || TREE_CODE (offset) == CONVERT_EXPR)
9278 offset = TREE_OPERAND (offset, 0);
9279
9280 if (TREE_CODE (offset) != NEGATE_EXPR)
9281 return 0;
9282
9283 offset = TREE_OPERAND (offset, 0);
9284 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9285 || TREE_CODE (offset) == NOP_EXPR
9286 || TREE_CODE (offset) == CONVERT_EXPR)
9287 offset = TREE_OPERAND (offset, 0);
9288
55f9d7dc 9289 /* This must now be the address of EXP. */
9290 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
67c68e45 9291}
9292\f
dafdd1c8 9293	/* Return the tree node if ARG corresponds to a string constant, or zero
6ef828f9 9294 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
902de8ed 9295 in bytes within the string that ARG is accessing. The type of the
9296 offset will be `sizetype'. */
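/* For instance, this recognizes &"abc" (an ADDR_EXPR of a STRING_CST),
   &"abc"[i], and "abc" + i with the string in either operand of the
   PLUS_EXPR.  */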
b54842d8 9297
53800dbe 9298tree
35cb5232 9299string_constant (tree arg, tree *ptr_offset)
b54842d8 9300{
9301 STRIP_NOPS (arg);
9302
9303 if (TREE_CODE (arg) == ADDR_EXPR
9304 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9305 {
902de8ed 9306 *ptr_offset = size_zero_node;
b54842d8 9307 return TREE_OPERAND (arg, 0);
9308 }
4ee9c684 9309 if (TREE_CODE (arg) == ADDR_EXPR
9310 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9311 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9312 {
9313 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9314 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9315 }
b54842d8 9316 else if (TREE_CODE (arg) == PLUS_EXPR)
9317 {
9318 tree arg0 = TREE_OPERAND (arg, 0);
9319 tree arg1 = TREE_OPERAND (arg, 1);
9320
9321 STRIP_NOPS (arg0);
9322 STRIP_NOPS (arg1);
9323
9324 if (TREE_CODE (arg0) == ADDR_EXPR
9325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
10f307d9 9326 {
902de8ed 9327 *ptr_offset = convert (sizetype, arg1);
b54842d8 9328 return TREE_OPERAND (arg0, 0);
10f307d9 9329 }
b54842d8 9330 else if (TREE_CODE (arg1) == ADDR_EXPR
9331 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
10f307d9 9332 {
902de8ed 9333 *ptr_offset = convert (sizetype, arg0);
b54842d8 9334 return TREE_OPERAND (arg1, 0);
10f307d9 9335 }
b54842d8 9336 }
649d8da6 9337
b54842d8 9338 return 0;
9339}
649d8da6 9340\f
b54842d8 9341/* Expand code for a post- or pre- increment or decrement
9342 and return the RTX for the result.
9343 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
acfb31e5 9344
b54842d8 9345static rtx
35cb5232 9346expand_increment (tree exp, int post, int ignore)
649d8da6 9347{
19cb6b50 9348 rtx op0, op1;
9349 rtx temp, value;
9350 tree incremented = TREE_OPERAND (exp, 0);
b54842d8 9351 optab this_optab = add_optab;
9352 int icode;
9353 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9354 int op0_is_copy = 0;
9355 int single_insn = 0;
9356 /* 1 means we can't store into OP0 directly,
9357 because it is a subreg narrower than a word,
9358 and we don't dare clobber the rest of the word. */
9359 int bad_subreg = 0;
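  /* E.g. (subreg:QI (reg:SI n)) cannot be assigned to directly without
     clobbering the other bytes of the SImode register.  */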
acfb31e5 9360
b54842d8 9361 /* Stabilize any component ref that might need to be
9362 evaluated more than once below. */
9363 if (!post
9364 || TREE_CODE (incremented) == BIT_FIELD_REF
9365 || (TREE_CODE (incremented) == COMPONENT_REF
9366 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9367 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9368 incremented = stabilize_reference (incremented);
9369 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9370 ones into save exprs so that they don't accidentally get evaluated
9371 more than once by the code below. */
9372 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9373 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9374 incremented = save_expr (incremented);
0dbd1c74 9375
b54842d8 9376 /* Compute the operands as RTX.
9377 Note whether OP0 is the actual lvalue or a copy of it:
9378 I believe it is a copy iff it is a register or subreg
1e625a2e 9379 and insns were generated in computing it. */
0dbd1c74 9380
b54842d8 9381 temp = get_last_insn ();
8a06f2d4 9382 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
0dbd1c74 9383
b54842d8 9384 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9385 in place but instead must do sign- or zero-extension during assignment,
9386 so we copy it into a new register and let the code below use it as
9387 a copy.
0dbd1c74 9388
b54842d8 9389 Note that we can safely modify this SUBREG since it is known not to be
9390 shared (it was made by the expand_expr call above). */
9391
9392 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9393 {
9394 if (post)
9395 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9396 else
9397 bad_subreg = 1;
9398 }
9399 else if (GET_CODE (op0) == SUBREG
9400 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9401 {
9402 /* We cannot increment this SUBREG in place. If we are
9403 post-incrementing, get a copy of the old value. Otherwise,
9404 just mark that we cannot increment in place. */
9405 if (post)
9406 op0 = copy_to_reg (op0);
9407 else
9408 bad_subreg = 1;
0dbd1c74 9409 }
9410
8ad4c111 9411 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
b54842d8 9412 && temp != get_last_insn ());
8a06f2d4 9413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
acfb31e5 9414
b54842d8 9415 /* Decide whether incrementing or decrementing. */
9416 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9417 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9418 this_optab = sub_optab;
9419
9420 /* Convert decrement by a constant into a negative increment. */
9421 if (this_optab == sub_optab
9422 && GET_CODE (op1) == CONST_INT)
649d8da6 9423 {
fa56dc1d 9424 op1 = GEN_INT (-INTVAL (op1));
b54842d8 9425 this_optab = add_optab;
649d8da6 9426 }
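  /* Thus "x -= 5" is handled from here on as "x += -5".  */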
acfb31e5 9427
bec2d490 9428 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
e17f5b23 9429 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
bec2d490 9430
b54842d8 9431 /* For a preincrement, see if we can do this with a single instruction. */
9432 if (!post)
9433 {
9434 icode = (int) this_optab->handlers[(int) mode].insn_code;
9435 if (icode != (int) CODE_FOR_nothing
9436 /* Make sure that OP0 is valid for operands 0 and 1
9437 of the insn we want to queue. */
6357eaae 9438 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9439 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9440 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b54842d8 9441 single_insn = 1;
9442 }
10f307d9 9443
b54842d8 9444 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9445 then we cannot just increment OP0. We must therefore contrive to
9446 increment the original value. Then, for postincrement, we can return
9447 OP0 since it is a copy of the old value. For preincrement, expand here
9448 unless we can do it with a single insn.
10f307d9 9449
b54842d8 9450 Likewise if storing directly into OP0 would clobber high bits
9451 we need to preserve (bad_subreg). */
9452 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
36ce2511 9453 {
b54842d8 9454 /* This is the easiest way to increment the value wherever it is.
9455 Problems with multiple evaluation of INCREMENTED are prevented
9456 because either (1) it is a component_ref or preincrement,
9457 in which case it was stabilized above, or (2) it is an array_ref
9458 with constant index in an array in a register, which is
9459 safe to reevaluate. */
9460 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9461 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9462 ? MINUS_EXPR : PLUS_EXPR),
9463 TREE_TYPE (exp),
9464 incremented,
9465 TREE_OPERAND (exp, 1));
36ce2511 9466
b54842d8 9467 while (TREE_CODE (incremented) == NOP_EXPR
9468 || TREE_CODE (incremented) == CONVERT_EXPR)
9469 {
9470 newexp = convert (TREE_TYPE (incremented), newexp);
9471 incremented = TREE_OPERAND (incremented, 0);
9472 }
10f307d9 9473
725cd5ad 9474 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b54842d8 9475 return post ? op0 : temp;
9476 }
10f307d9 9477
b54842d8 9478 if (post)
9479 {
9480 /* We have a true reference to the value in OP0.
9481 If there is an insn to add or subtract in this mode, queue it.
40e55fbb 9482 Queuing the increment insn avoids the register shuffling
b54842d8 9483 that often results if we must increment now and first save
9484 the old value for subsequent use. */
10f307d9 9485
b54842d8 9486#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9487 op0 = stabilize (op0);
9488#endif
3674f4b7 9489
b54842d8 9490 icode = (int) this_optab->handlers[(int) mode].insn_code;
9491 if (icode != (int) CODE_FOR_nothing
9492 /* Make sure that OP0 is valid for operands 0 and 1
9493 of the insn we want to queue. */
6357eaae 9494 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9495 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b54842d8 9496 {
6357eaae 9497 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b54842d8 9498 op1 = force_reg (mode, op1);
10f307d9 9499
b54842d8 9500 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9501 }
e16ceb8e 9502 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
b54842d8 9503 {
9504 rtx addr = (general_operand (XEXP (op0, 0), mode)
9505 ? force_reg (Pmode, XEXP (op0, 0))
9506 : copy_to_reg (XEXP (op0, 0)));
9507 rtx temp, result;
649d8da6 9508
537ffcfc 9509 op0 = replace_equiv_address (op0, addr);
b54842d8 9510 temp = force_reg (GET_MODE (op0), op0);
6357eaae 9511 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b54842d8 9512 op1 = force_reg (mode, op1);
649d8da6 9513
b54842d8 9514 /* The increment queue is LIFO, thus we have to `queue'
9515 the instructions in reverse order. */
9516 enqueue_insn (op0, gen_move_insn (op0, temp));
9517 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9518 return result;
10f307d9 9519 }
9520 }
649d8da6 9521
b54842d8 9522 /* Preincrement, or we can't increment with one simple insn. */
9523 if (post)
9524 /* Save a copy of the value before inc or dec, to return it later. */
9525 temp = value = copy_to_reg (op0);
9526 else
9527 /* Arrange to return the incremented value. */
9528 /* Copy the rtx because expand_binop will protect from the queue,
9529 and the results of that would be invalid for us to return
9530 if our caller does emit_queue before using our result. */
9531 temp = copy_rtx (value = op0);
10f307d9 9532
b54842d8 9533 /* Increment however we can. */
8a06f2d4 9534 op1 = expand_binop (mode, this_optab, value, op1, op0,
78a8ed03 9535 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8a06f2d4 9536
b54842d8 9537 /* Make sure the value is stored into OP0. */
9538 if (op1 != op0)
9539 emit_move_insn (op0, op1);
7214c9d7 9540
b54842d8 9541 return temp;
9542}
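/* To illustrate the contract: expanding "i++" as a value yields an
   rtx holding the old value of i, while "++i" yields the incremented
   value; in both cases the store back into i is emitted (or queued)
   as a side effect.  */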
9543\f
b54842d8 9544/* Generate code to calculate EXP using a store-flag instruction
9545 and return an rtx for the result. EXP is either a comparison
9546 or a TRUTH_NOT_EXPR whose operand is a comparison.
649d8da6 9547
b54842d8 9548 If TARGET is nonzero, store the result there if convenient.
649d8da6 9549
6ef828f9 9550 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b54842d8 9551 cheap.
649d8da6 9552
b54842d8 9553 Return zero if there is no suitable set-flag instruction
9554 available on this machine.
649d8da6 9555
b54842d8 9556 Once expand_expr has been called on the arguments of the comparison,
9557 we are committed to doing the store flag, since it is not safe to
9558 re-evaluate the expression. We emit the store-flag insn by calling
9559 emit_store_flag, but only expand the arguments if we have a reason
9560 to believe that emit_store_flag will be successful. If we think that
9561 it will, but it isn't, we have to simulate the store-flag with a
9562 set/jump/set sequence. */
649d8da6 9563
b54842d8 9564static rtx
35cb5232 9565do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b54842d8 9566{
9567 enum rtx_code code;
9568 tree arg0, arg1, type;
9569 tree tem;
9570 enum machine_mode operand_mode;
9571 int invert = 0;
9572 int unsignedp;
9573 rtx op0, op1;
9574 enum insn_code icode;
9575 rtx subtarget = target;
0c22b90f 9576 rtx result, label;
649d8da6 9577
b54842d8 9578 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9579 result at the end. We can't simply invert the test since it would
9580 have already been inverted if it were valid. This case occurs for
9581 some floating-point comparisons. */
649d8da6 9582
b54842d8 9583 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9584 invert = 1, exp = TREE_OPERAND (exp, 0);
649d8da6 9585
b54842d8 9586 arg0 = TREE_OPERAND (exp, 0);
9587 arg1 = TREE_OPERAND (exp, 1);
fc80e4dd 9588
9589 /* Don't crash if the comparison was erroneous. */
9590 if (arg0 == error_mark_node || arg1 == error_mark_node)
9591 return const0_rtx;
9592
b54842d8 9593 type = TREE_TYPE (arg0);
9594 operand_mode = TYPE_MODE (type);
78a8ed03 9595 unsignedp = TYPE_UNSIGNED (type);
649d8da6 9596
b54842d8 9597 /* We won't bother with BLKmode store-flag operations because it would mean
9598 passing a lot of information to emit_store_flag. */
9599 if (operand_mode == BLKmode)
9600 return 0;
649d8da6 9601
b54842d8 9602 /* We won't bother with store-flag operations involving function pointers
9603 when function pointers must be canonicalized before comparisons. */
9604#ifdef HAVE_canonicalize_funcptr_for_compare
9605 if (HAVE_canonicalize_funcptr_for_compare
9606 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9607 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9608 == FUNCTION_TYPE))
9609 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9610 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9611 == FUNCTION_TYPE))))
9612 return 0;
649d8da6 9613#endif
9614
b54842d8 9615 STRIP_NOPS (arg0);
9616 STRIP_NOPS (arg1);
649d8da6 9617
b54842d8 9618 /* Get the rtx comparison code to use. We know that EXP is a comparison
9619 operation of some type. Some comparisons against 1 and -1 can be
9620 converted to comparisons with zero. Do so here so that the tests
9621 below will be aware that we have a comparison with zero. These
9622 tests will not catch constants in the first operand, but constants
9623 are rarely passed as the first operand. */
649d8da6 9624
b54842d8 9625 switch (TREE_CODE (exp))
9626 {
9627 case EQ_EXPR:
9628 code = EQ;
10f307d9 9629 break;
b54842d8 9630 case NE_EXPR:
9631 code = NE;
10f307d9 9632 break;
b54842d8 9633 case LT_EXPR:
9634 if (integer_onep (arg1))
9635 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9636 else
9637 code = unsignedp ? LTU : LT;
649d8da6 9638 break;
b54842d8 9639 case LE_EXPR:
9640 if (! unsignedp && integer_all_onesp (arg1))
9641 arg1 = integer_zero_node, code = LT;
9642 else
9643 code = unsignedp ? LEU : LE;
649d8da6 9644 break;
b54842d8 9645 case GT_EXPR:
9646 if (! unsignedp && integer_all_onesp (arg1))
9647 arg1 = integer_zero_node, code = GE;
9648 else
9649 code = unsignedp ? GTU : GT;
9650 break;
9651 case GE_EXPR:
9652 if (integer_onep (arg1))
9653 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9654 else
9655 code = unsignedp ? GEU : GE;
649d8da6 9656 break;
a4110d9a 9657
9658 case UNORDERED_EXPR:
9659 code = UNORDERED;
9660 break;
9661 case ORDERED_EXPR:
9662 code = ORDERED;
9663 break;
9664 case UNLT_EXPR:
9665 code = UNLT;
9666 break;
9667 case UNLE_EXPR:
9668 code = UNLE;
9669 break;
9670 case UNGT_EXPR:
9671 code = UNGT;
9672 break;
9673 case UNGE_EXPR:
9674 code = UNGE;
9675 break;
9676 case UNEQ_EXPR:
9677 code = UNEQ;
9678 break;
318a728f 9679 case LTGT_EXPR:
9680 code = LTGT;
9681 break;
a4110d9a 9682
649d8da6 9683 default:
b54842d8 9684 abort ();
10f307d9 9685 }
10f307d9 9686
b54842d8 9687 /* Put a constant second. */
9688 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9689 {
9690 tem = arg0; arg0 = arg1; arg1 = tem;
9691 code = swap_condition (code);
649d8da6 9692 }
10f307d9 9693
b54842d8 9694 /* If this is an equality or inequality test of a single bit, we can
9695 do this by shifting the bit being tested to the low-order bit and
9696 masking the result with the constant 1. If the condition was EQ,
9697 we xor it with 1. This does not require an scc insn and is faster
6881f973 9698 than an scc insn even if we have it.
9699
9700 The code to make this transformation was moved into fold_single_bit_test,
9701 so we just call into the folder and expand its result. */
3218a49d 9702
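  /* For example, "(x & 4) != 0" expands as "(x >> 2) & 1", and
     "(x & 4) == 0" as that result XORed with 1.  */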
b54842d8 9703 if ((code == NE || code == EQ)
9704 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
76c3b8b7 9706 {
dc24ddbd 9707 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
76c3b8b7 9708 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
1d5ca076 9709 arg0, arg1, type),
76c3b8b7 9710 target, VOIDmode, EXPAND_NORMAL);
9711 }
10f307d9 9712
b54842d8 9713 /* Now see if we are likely to be able to do this. Return if not. */
a4110d9a 9714 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b54842d8 9715 return 0;
a4110d9a 9716
b54842d8 9717 icode = setcc_gen_code[(int) code];
9718 if (icode == CODE_FOR_nothing
6357eaae 9719 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
649d8da6 9720 {
b54842d8 9721 /* We can only do this if it is one of the special cases that
9722 can be handled without an scc insn. */
9723 if ((code == LT && integer_zerop (arg1))
9724 || (! only_cheap && code == GE && integer_zerop (arg1)))
9725 ;
9726 else if (BRANCH_COST >= 0
9727 && ! only_cheap && (code == NE || code == EQ)
9728 && TREE_CODE (type) != REAL_TYPE
9729 && ((abs_optab->handlers[(int) operand_mode].insn_code
9730 != CODE_FOR_nothing)
9731 || (ffs_optab->handlers[(int) operand_mode].insn_code
9732 != CODE_FOR_nothing)))
9733 ;
9734 else
9735 return 0;
649d8da6 9736 }
fa56dc1d 9737
d8e5b213 9738 if (! get_subtarget (target)
a54ebf2e 9739 || GET_MODE (subtarget) != operand_mode)
b54842d8 9740 subtarget = 0;
9741
33204670 9742 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b54842d8 9743
9744 if (target == 0)
9745 target = gen_reg_rtx (mode);
9746
9747 /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
9748 because, if emit_store_flag does anything, it will succeed and
9749 OP0 and OP1 will not be used subsequently.  */
649d8da6 9750
b54842d8 9751 result = emit_store_flag (target, code,
9752 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9753 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9754 operand_mode, unsignedp, 1);
649d8da6 9755
b54842d8 9756 if (result)
9757 {
9758 if (invert)
9759 result = expand_binop (mode, xor_optab, result, const1_rtx,
9760 result, 0, OPTAB_LIB_WIDEN);
9761 return result;
649d8da6 9762 }
10f307d9 9763
b54842d8 9764 /* If this failed, we have to do this with set/compare/jump/set code. */
8ad4c111 9765 if (!REG_P (target)
b54842d8 9766 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9767 target = gen_reg_rtx (GET_MODE (target));
9768
9769 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9770 result = compare_from_rtx (op0, op1, code, unsignedp,
2b96c5f6 9771 operand_mode, NULL_RTX);
b54842d8 9772 if (GET_CODE (result) == CONST_INT)
9773 return (((result == const0_rtx && ! invert)
9774 || (result != const0_rtx && invert))
9775 ? const0_rtx : const1_rtx);
649d8da6 9776
66ac11b2 9777 /* The code of RESULT may not match CODE if compare_from_rtx
9778 decided to swap its operands and reverse the original code.
9779
9780 We know that compare_from_rtx returns either a CONST_INT or
9781 a new comparison code, so it is safe to just extract the
9782 code from RESULT. */
9783 code = GET_CODE (result);
9784
b54842d8 9785 label = gen_label_rtx ();
9786 if (bcc_gen_fctn[(int) code] == 0)
9787 abort ();
a92771b8 9788
b54842d8 9789 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9790 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9791 emit_label (label);
10f307d9 9792
b54842d8 9793 return target;
649d8da6 9794}
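/* Schematically, the fallback sequence emitted above is

     target = 1;                (or 0 when INVERT is set)
     if (op0 <code> op1) goto label;
     target = 0;                (or 1 when INVERT is set)
   label:

   which simulates the store-flag with a conditional jump.  */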
b54842d8 9795\f
b54842d8 9796
539a3a92 9797/* Stubs in case we haven't got a casesi insn. */
9798#ifndef HAVE_casesi
9799# define HAVE_casesi 0
9800# define gen_casesi(a, b, c, d, e) (0)
9801# define CODE_FOR_casesi CODE_FOR_nothing
9802#endif
9803
9804/* If the machine does not have a case insn that compares the bounds,
9805 this means extra overhead for dispatch tables, which raises the
9806 threshold for using them. */
9807#ifndef CASE_VALUES_THRESHOLD
9808#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9809#endif /* CASE_VALUES_THRESHOLD */
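/* With the default definition above, a dispatch table is not
   considered until a switch has at least 4 case values when casesi
   is available, or 5 otherwise.  */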
9810
9811unsigned int
35cb5232 9812case_values_threshold (void)
539a3a92 9813{
9814 return CASE_VALUES_THRESHOLD;
9815}
9816
9817/* Attempt to generate a casesi instruction. Returns 1 if successful,
9818 0 otherwise (i.e. if there is no casesi instruction). */
9819int
35cb5232 9820try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9821 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
539a3a92 9822{
9823 enum machine_mode index_mode = SImode;
9824 int index_bits = GET_MODE_BITSIZE (index_mode);
9825 rtx op1, op2, index;
9826 enum machine_mode op_mode;
9827
9828 if (! HAVE_casesi)
9829 return 0;
9830
9831 /* Convert the index to SImode. */
9832 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9833 {
9834 enum machine_mode omode = TYPE_MODE (index_type);
9835 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9836
9837 /* We must handle the endpoints in the original mode. */
9838 index_expr = build (MINUS_EXPR, index_type,
9839 index_expr, minval);
9840 minval = integer_zero_node;
9841 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9842 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
2b96c5f6 9843 omode, 1, default_label);
539a3a92 9844 /* Now we can safely truncate. */
9845 index = convert_to_mode (index_mode, index, 0);
9846 }
9847 else
9848 {
9849 if (TYPE_MODE (index_type) != index_mode)
9850 {
dc24ddbd 9851 index_expr = convert (lang_hooks.types.type_for_size
771d21fa 9852 (index_bits, 0), index_expr);
539a3a92 9853 index_type = TREE_TYPE (index_expr);
9854 }
9855
9856 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9857 }
9858 emit_queue ();
9859 index = protect_from_queue (index, 0);
9860 do_pending_stack_adjust ();
9861
9862 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9863 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9864 (index, op_mode))
9865 index = copy_to_mode_reg (op_mode, index);
35f44ac1 9866
539a3a92 9867 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9868
9869 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9870 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
78a8ed03 9871 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
539a3a92 9872 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9873 (op1, op_mode))
9874 op1 = copy_to_mode_reg (op_mode, op1);
9875
9876 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9877
9878 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9879 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
78a8ed03 9880 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
539a3a92 9881 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9882 (op2, op_mode))
9883 op2 = copy_to_mode_reg (op_mode, op2);
9884
9885 emit_jump_insn (gen_casesi (index, op1, op2,
9886 table_label, default_label));
9887 return 1;
9888}
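/* In outline, try_casesi converts the index to SImode (performing the
   bounds check in the wider original mode first when the index does
   not fit), forces the index, minval and range into the operand modes
   the casesi pattern demands, and emits the casesi jump, which itself
   compares the index against the bounds and dispatches through
   TABLE_LABEL or to DEFAULT_LABEL.  */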
9889
9890/* Attempt to generate a tablejump instruction; same concept. */
9891#ifndef HAVE_tablejump
9892#define HAVE_tablejump 0
9893#define gen_tablejump(x, y) (0)
9894#endif
9895
9896/* Subroutine of the next function.
9897
9898 INDEX is the value being switched on, with the lowest value
b54842d8 9899 in the table already subtracted.
9900 MODE is its expected mode (needed if INDEX is constant).
9901 RANGE is the length of the jump table.
9902 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
1ccc1a7e 9903
b54842d8 9904 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9905 index value is out of range. */
a92771b8 9906
539a3a92 9907static void
35cb5232 9908do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9909 rtx default_label)
649d8da6 9910{
19cb6b50 9911 rtx temp, vector;
1ccc1a7e 9912
88d866dd 9913 if (INTVAL (range) > cfun->max_jumptable_ents)
9914 cfun->max_jumptable_ents = INTVAL (range);
71a455ac 9915
b54842d8 9916 /* Do an unsigned comparison (in the proper mode) between the index
9917 expression and the value which represents the length of the range.
9918 Since we just finished subtracting the lower bound of the range
9919 from the index expression, this comparison allows us to simultaneously
9920 check that the original index expression value is both greater than
9921 or equal to the minimum value of the range and less than or equal to
9922 the maximum value of the range. */
9282409c 9923
5a894bc6 9924 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
2b96c5f6 9925 default_label);
10f307d9 9926
b54842d8 9927 /* If index is in range, it must fit in Pmode.
9928 Convert to Pmode so we can index with it. */
9929 if (mode != Pmode)
9930 index = convert_to_mode (Pmode, index, 1);
10f307d9 9931
c7bf1374 9932 /* Don't let a MEM slip through, because then the INDEX that comes
b54842d8 9933 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9934 and break_out_memory_refs will go to work on it and mess it up. */
9935#ifdef PIC_CASE_VECTOR_ADDRESS
8ad4c111 9936 if (flag_pic && !REG_P (index))
b54842d8 9937 index = copy_to_mode_reg (Pmode, index);
9938#endif
649d8da6 9939
b54842d8 9940 /* If flag_force_addr were to affect this address
9941 it could interfere with the tricky assumptions made
9942 about addresses that contain label-refs,
9943 which may be valid only very near the tablejump itself. */
9944 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9945 GET_MODE_SIZE, because this indicates how large insns are. The other
9946 uses should all be Pmode, because they are addresses. This code
9947 could fail if addresses and insns are not the same size. */
9948 index = gen_rtx_PLUS (Pmode,
9949 gen_rtx_MULT (Pmode, index,
9950 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9951 gen_rtx_LABEL_REF (Pmode, table_label));
9952#ifdef PIC_CASE_VECTOR_ADDRESS
9953 if (flag_pic)
9954 index = PIC_CASE_VECTOR_ADDRESS (index);
9955 else
10f307d9 9956#endif
b54842d8 9957 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9958 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9959 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9960 RTX_UNCHANGING_P (vector) = 1;
1ed8ccdb 9961 MEM_NOTRAP_P (vector) = 1;
b54842d8 9962 convert_move (temp, vector, 0);
9963
9964 emit_jump_insn (gen_tablejump (temp, table_label));
9965
9966 /* If we are generating PIC code or if the table is PC-relative, the
9967 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9968 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9969 emit_barrier ();
10f307d9 9970}
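/* Schematically, the address computed above is

     (plus:P (mult:P index (const_int GET_MODE_SIZE (CASE_VECTOR_MODE)))
	     (label_ref table_label))

   possibly rewritten by PIC_CASE_VECTOR_ADDRESS, so each slot of the
   dispatch table is one CASE_VECTOR_MODE entry wide.  */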
b54842d8 9971
539a3a92 9972int
35cb5232 9973try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9974 rtx table_label, rtx default_label)
539a3a92 9975{
9976 rtx index;
9977
9978 if (! HAVE_tablejump)
9979 return 0;
9980
9981 index_expr = fold (build (MINUS_EXPR, index_type,
9982 convert (index_type, index_expr),
9983 convert (index_type, minval)));
9984 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9985 emit_queue ();
9986 index = protect_from_queue (index, 0);
9987 do_pending_stack_adjust ();
9988
9989 do_tablejump (index, TYPE_MODE (index_type),
9990 convert_modes (TYPE_MODE (index_type),
9991 TYPE_MODE (TREE_TYPE (range)),
9992 expand_expr (range, NULL_RTX,
9993 VOIDmode, 0),
78a8ed03 9994 TYPE_UNSIGNED (TREE_TYPE (range))),
539a3a92 9995 table_label, default_label);
9996 return 1;
9997}
1f3233d1 9998
ead34f59 9999/* Nonzero if the mode is a valid vector mode for this architecture.
10000 This returns nonzero even if there is no hardware support for the
10001 vector mode, but we can emulate with narrower modes. */
10002
10003int
35cb5232 10004vector_mode_valid_p (enum machine_mode mode)
ead34f59 10005{
10006 enum mode_class class = GET_MODE_CLASS (mode);
10007 enum machine_mode innermode;
10008
10009 /* Only vector modes are meaningful here.  */
10010 if (class != MODE_VECTOR_INT
10011 && class != MODE_VECTOR_FLOAT)
10012 return 0;
10013
10014 /* If the target supports the vector mode directly, it is valid.  */
10015 if (VECTOR_MODE_SUPPORTED_P (mode))
10016 return 1;
10017
10018 innermode = GET_MODE_INNER (mode);
10019
10020 /* We should probably return 1 if requesting V4DI when we have no DI
10021 but do have V2DI; however, that case is probably very unlikely. */
10022
10023 /* If we have support for the inner mode, we can safely emulate it.
10024 We may not have V2DI, but we can emulate with a pair of DIs. */
10025 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10026}
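/* For example, V2DImode is reported valid on a target without a V2DI
   move pattern as long as DImode moves exist, since the vector can
   then be emulated as a pair of DImode values.  */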
10027
c3309fc6 10028/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10029static rtx
35cb5232 10030const_vector_from_tree (tree exp)
c3309fc6 10031{
10032 rtvec v;
10033 int units, i;
10034 tree link, elt;
10035 enum machine_mode inner, mode;
10036
10037 mode = TYPE_MODE (TREE_TYPE (exp));
10038
4ee9c684 10039 if (initializer_zerop (exp))
c3309fc6 10040 return CONST0_RTX (mode);
10041
10042 units = GET_MODE_NUNITS (mode);
10043 inner = GET_MODE_INNER (mode);
10044
10045 v = rtvec_alloc (units);
10046
10047 link = TREE_VECTOR_CST_ELTS (exp);
10048 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10049 {
10050 elt = TREE_VALUE (link);
10051
10052 if (TREE_CODE (elt) == REAL_CST)
10053 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10054 inner);
10055 else
10056 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10057 TREE_INT_CST_HIGH (elt),
10058 inner);
10059 }
10060
c2f2aba8 10061 /* Initialize remaining elements to 0. */
10062 for (; i < units; ++i)
10063 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10064
c3309fc6 10065 return gen_rtx_raw_CONST_VECTOR (mode, v);
10066}
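/* For example, a V4SF VECTOR_CST that lists only the elements 1.0 and
   2.0 yields a CONST_VECTOR whose two trailing elements are
   CONST0_RTX (SFmode).  */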
1f3233d1 10067#include "gt-expr.h"