]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/expr.c
alias.c (nonlocal_mentioned_p, [...]): Use, LABEL_P, JUMP_P, CALL_P, NONJUMP_INSN_P...
[thirdparty/gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
8752c357 2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
d9221e01 3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
bbf6f052 4
1322177d 5This file is part of GCC.
bbf6f052 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
bbf6f052 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
bbf6f052
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
bbf6f052 21
bbf6f052 22#include "config.h"
670ee920 23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
ca695ac9 26#include "machmode.h"
11ad4784 27#include "real.h"
bbf6f052
RK
28#include "rtl.h"
29#include "tree.h"
30#include "flags.h"
bf76bb5a 31#include "regs.h"
4ed67205 32#include "hard-reg-set.h"
3d195391 33#include "except.h"
bbf6f052 34#include "function.h"
bbf6f052 35#include "insn-config.h"
34e81b5a 36#include "insn-attr.h"
3a94c984 37/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
d6f4ec51 38#include "expr.h"
e78d8e51
ZW
39#include "optabs.h"
40#include "libfuncs.h"
bbf6f052 41#include "recog.h"
3ef1eef4 42#include "reload.h"
bbf6f052 43#include "output.h"
bbf6f052 44#include "typeclass.h"
10f0ad3d 45#include "toplev.h"
d7db6646 46#include "ggc.h"
ac79cd5a 47#include "langhooks.h"
e2c49ac2 48#include "intl.h"
b1474bb7 49#include "tm_p.h"
6de9cd9a 50#include "tree-iterator.h"
2f8e398b
PB
51#include "tree-pass.h"
52#include "tree-flow.h"
c988af2b 53#include "target.h"
2f8e398b 54#include "timevar.h"
bbf6f052 55
bbf6f052 56/* Decide whether a function's arguments should be processed
bbc8a071
RK
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
bbf6f052 61
bbf6f052 62#ifdef PUSH_ROUNDING
bbc8a071 63
2da4124d 64#ifndef PUSH_ARGS_REVERSED
3319a347 65#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
3a94c984 66#define PUSH_ARGS_REVERSED /* If it's last to first. */
bbf6f052 67#endif
2da4124d 68#endif
bbc8a071 69
bbf6f052
RK
70#endif
71
72#ifndef STACK_PUSH_CODE
73#ifdef STACK_GROWS_DOWNWARD
74#define STACK_PUSH_CODE PRE_DEC
75#else
76#define STACK_PUSH_CODE PRE_INC
77#endif
78#endif
79
4ca79136 80
bbf6f052
RK
81/* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87int cse_not_expected;
88
4969d05d
RK
89/* This structure is used by move_by_pieces to describe the move to
90 be performed. */
4969d05d
RK
91struct move_by_pieces
92{
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
3bdf5ad1
RK
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
4969d05d
RK
103 int reverse;
104};
105
57814e5e 106/* This structure is used by store_by_pieces to describe the clear to
9de08200
RK
107 be performed. */
108
57814e5e 109struct store_by_pieces
9de08200
RK
110{
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
3bdf5ad1
RK
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
502b8322 117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
fad205ff 118 void *constfundata;
9de08200
RK
119 int reverse;
120};
121
502b8322
AJ
122static rtx enqueue_insn (rtx, rtx);
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int);
125static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127static bool block_move_libcall_safe_for_call_parm (void);
70128ad9 128static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
502b8322
AJ
129static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130static tree emit_block_move_libcall_fn (int);
131static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
70128ad9 137static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
502b8322
AJ
138static rtx clear_storage_via_libcall (rtx, rtx);
139static tree clear_storage_libcall_fn (int);
140static rtx compress_float_constant (rtx, rtx);
141static rtx get_subtarget (rtx);
502b8322
AJ
142static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree, int);
148static rtx var_rtx (tree);
149
150static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
d50a16c4 151static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
502b8322
AJ
152
153static int is_aligning_offset (tree, tree);
154static rtx expand_increment (tree, int, int);
eb698c58
RS
155static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
bc15d0ef 157static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
502b8322 158static rtx do_store_flag (tree, rtx, enum machine_mode, int);
21d93687 159#ifdef PUSH_ROUNDING
502b8322 160static void emit_single_push_insn (enum machine_mode, rtx, tree);
21d93687 161#endif
502b8322
AJ
162static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
163static rtx const_vector_from_tree (tree);
bbf6f052 164
4fa52007
RK
165/* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
168
169static char direct_load[NUM_MACHINE_MODES];
170static char direct_store[NUM_MACHINE_MODES];
171
51286de6
RH
172/* Record for each mode whether we can float-extend from memory. */
173
174static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
175
fbe1758d 176/* This macro is used to determine whether move_by_pieces should be called
3a94c984 177 to perform a structure copy. */
fbe1758d 178#ifndef MOVE_BY_PIECES_P
19caa751 179#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
8752c357 180 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
fbe1758d
AM
181#endif
182
78762e3b
RS
183/* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185#ifndef CLEAR_BY_PIECES_P
186#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
188#endif
189
4977bab6
ZW
190/* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193#ifndef STORE_BY_PIECES_P
194#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
195#endif
196
266007a7 197/* This array records the insn_code of insns to perform block moves. */
70128ad9 198enum insn_code movmem_optab[NUM_MACHINE_MODES];
266007a7 199
9de08200 200/* This array records the insn_code of insns to perform block clears. */
70128ad9 201enum insn_code clrmem_optab[NUM_MACHINE_MODES];
9de08200 202
118355a0
ZW
203/* These arrays record the insn_code of two different kinds of insns
204 to perform block compares. */
205enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
207
cc2902df 208/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
e87b4f3f
RS
209
210#ifndef SLOW_UNALIGNED_ACCESS
e1565e65 211#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
e87b4f3f 212#endif
bbf6f052 213\f
4fa52007 214/* This is run once per compilation to set up which modes can be used
266007a7 215 directly in memory and to initialize the block move optab. */
4fa52007
RK
216
217void
502b8322 218init_expr_once (void)
4fa52007
RK
219{
220 rtx insn, pat;
221 enum machine_mode mode;
cff48d8f 222 int num_clobbers;
9ec36da5 223 rtx mem, mem1;
bf1660a6 224 rtx reg;
9ec36da5 225
e2549997
RS
226 /* Try indexing by frame ptr and try by stack ptr.
227 It is known that on the Convex the stack ptr isn't a valid index.
228 With luck, one or the other is valid on any machine. */
9ec36da5
JL
229 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
230 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 231
bf1660a6
JL
232 /* A scratch register we can modify in-place below to avoid
233 useless RTL allocations. */
234 reg = gen_rtx_REG (VOIDmode, -1);
235
1f8c3c5b
RH
236 insn = rtx_alloc (INSN);
237 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
238 PATTERN (insn) = pat;
4fa52007
RK
239
240 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
241 mode = (enum machine_mode) ((int) mode + 1))
242 {
243 int regno;
4fa52007
RK
244
245 direct_load[(int) mode] = direct_store[(int) mode] = 0;
246 PUT_MODE (mem, mode);
e2549997 247 PUT_MODE (mem1, mode);
bf1660a6 248 PUT_MODE (reg, mode);
4fa52007 249
e6fe56a4
RK
250 /* See if there is some register that can be used in this mode and
251 directly loaded or stored from memory. */
252
7308a047
RS
253 if (mode != VOIDmode && mode != BLKmode)
254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
255 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 regno++)
257 {
258 if (! HARD_REGNO_MODE_OK (regno, mode))
259 continue;
e6fe56a4 260
bf1660a6 261 REGNO (reg) = regno;
e6fe56a4 262
7308a047
RS
263 SET_SRC (pat) = mem;
264 SET_DEST (pat) = reg;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_load[(int) mode] = 1;
e6fe56a4 267
e2549997
RS
268 SET_SRC (pat) = mem1;
269 SET_DEST (pat) = reg;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_load[(int) mode] = 1;
272
7308a047
RS
273 SET_SRC (pat) = reg;
274 SET_DEST (pat) = mem;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_store[(int) mode] = 1;
e2549997
RS
277
278 SET_SRC (pat) = reg;
279 SET_DEST (pat) = mem1;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_store[(int) mode] = 1;
7308a047 282 }
4fa52007
RK
283 }
284
51286de6
RH
285 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
286
287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
288 mode = GET_MODE_WIDER_MODE (mode))
289 {
290 enum machine_mode srcmode;
291 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 292 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
293 {
294 enum insn_code ic;
295
296 ic = can_extend_p (mode, srcmode, 0);
297 if (ic == CODE_FOR_nothing)
298 continue;
299
300 PUT_MODE (mem, srcmode);
0fb7aeda 301
51286de6
RH
302 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
303 float_extend_from_mem[mode][srcmode] = true;
304 }
305 }
4fa52007 306}
cff48d8f 307
bbf6f052
RK
308/* This is run at the start of compiling a function. */
309
310void
502b8322 311init_expr (void)
bbf6f052 312{
3a70d621 313 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052
RK
314}
315
49ad7cfa 316/* Small sanity check that the queue is empty at the end of a function. */
296b4ed9 317
bbf6f052 318void
502b8322 319finish_expr_for_function (void)
bbf6f052 320{
49ad7cfa
BS
321 if (pending_chain)
322 abort ();
bbf6f052
RK
323}
324\f
325/* Manage the queue of increment instructions to be output
326 for POSTINCREMENT_EXPR expressions, etc. */
327
bbf6f052
RK
328/* Queue up to increment (or change) VAR later. BODY says how:
329 BODY should be the same thing you would pass to emit_insn
330 to increment right away. It will go to emit_insn later on.
331
332 The value is a QUEUED expression to be used in place of VAR
333 where you want to guarantee the pre-incrementation value of VAR. */
334
335static rtx
502b8322 336enqueue_insn (rtx var, rtx body)
bbf6f052 337{
c5c76735
JL
338 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
339 body, pending_chain);
bbf6f052
RK
340 return pending_chain;
341}
342
343/* Use protect_from_queue to convert a QUEUED expression
344 into something that you can put immediately into an instruction.
345 If the queued incrementation has not happened yet,
346 protect_from_queue returns the variable itself.
347 If the incrementation has happened, protect_from_queue returns a temp
348 that contains a copy of the old value of the variable.
349
350 Any time an rtx which might possibly be a QUEUED is to be put
351 into an instruction, it must be passed through protect_from_queue first.
352 QUEUED expressions are not meaningful in instructions.
353
354 Do not pass a value through protect_from_queue and then hold
355 on to it for a while before putting it in an instruction!
356 If the queue is flushed in between, incorrect code will result. */
357
358rtx
502b8322 359protect_from_queue (rtx x, int modify)
bbf6f052 360{
b3694847 361 RTX_CODE code = GET_CODE (x);
bbf6f052
RK
362
363#if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367#endif
368
369 if (code != QUEUED)
370 {
e9baa644
RK
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
bbf6f052
RK
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 {
f1ec5147
RK
379 rtx y = XEXP (x, 0);
380 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
e9baa644 381
bbf6f052
RK
382 if (QUEUED_INSN (y))
383 {
f1ec5147
RK
384 rtx temp = gen_reg_rtx (GET_MODE (x));
385
e9baa644 386 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
387 QUEUED_INSN (y));
388 return temp;
389 }
f1ec5147 390
73b7f58c
BS
391 /* Copy the address into a pseudo, so that the returned value
392 remains correct across calls to emit_queue. */
f1ec5147 393 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
bbf6f052 394 }
f1ec5147 395
bbf6f052
RK
396 /* Otherwise, recursively protect the subexpressions of all
397 the kinds of rtx's that can contain a QUEUED. */
398 if (code == MEM)
3f15938e
RS
399 {
400 rtx tem = protect_from_queue (XEXP (x, 0), 0);
401 if (tem != XEXP (x, 0))
402 {
403 x = copy_rtx (x);
404 XEXP (x, 0) = tem;
405 }
406 }
bbf6f052
RK
407 else if (code == PLUS || code == MULT)
408 {
3f15938e
RS
409 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
410 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
411 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 {
413 x = copy_rtx (x);
414 XEXP (x, 0) = new0;
415 XEXP (x, 1) = new1;
416 }
bbf6f052
RK
417 }
418 return x;
419 }
73b7f58c
BS
420 /* If the increment has not happened, use the variable itself. Copy it
421 into a new pseudo so that the value remains correct across calls to
422 emit_queue. */
bbf6f052 423 if (QUEUED_INSN (x) == 0)
73b7f58c 424 return copy_to_reg (QUEUED_VAR (x));
bbf6f052
RK
425 /* If the increment has happened and a pre-increment copy exists,
426 use that copy. */
427 if (QUEUED_COPY (x) != 0)
428 return QUEUED_COPY (x);
429 /* The increment has happened but we haven't set up a pre-increment copy.
430 Set one up now, and use it. */
431 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
432 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
433 QUEUED_INSN (x));
434 return QUEUED_COPY (x);
435}
436
437/* Return nonzero if X contains a QUEUED expression:
438 if it contains anything that will be altered by a queued increment.
439 We handle only combinations of MEM, PLUS, MINUS and MULT operators
440 since memory addresses generally contain only those. */
441
1f06ee8d 442int
502b8322 443queued_subexp_p (rtx x)
bbf6f052 444{
b3694847 445 enum rtx_code code = GET_CODE (x);
bbf6f052
RK
446 switch (code)
447 {
448 case QUEUED:
449 return 1;
450 case MEM:
451 return queued_subexp_p (XEXP (x, 0));
452 case MULT:
453 case PLUS:
454 case MINUS:
e9a25f70
JL
455 return (queued_subexp_p (XEXP (x, 0))
456 || queued_subexp_p (XEXP (x, 1)));
457 default:
458 return 0;
bbf6f052 459 }
bbf6f052
RK
460}
461
1bbd65cd
EB
462/* Retrieve a mark on the queue. */
463
464static rtx
465mark_queue (void)
466{
467 return pending_chain;
468}
bbf6f052 469
1bbd65cd
EB
470/* Perform all the pending incrementations that have been enqueued
471 after MARK was retrieved. If MARK is null, perform all the
472 pending incrementations. */
473
474static void
475emit_insns_enqueued_after_mark (rtx mark)
bbf6f052 476{
b3694847 477 rtx p;
1bbd65cd
EB
478
479 /* The marked incrementation may have been emitted in the meantime
480 through a call to emit_queue. In this case, the mark is not valid
481 anymore so do nothing. */
482 if (mark && ! QUEUED_BODY (mark))
483 return;
484
485 while ((p = pending_chain) != mark)
bbf6f052 486 {
41b083c4
R
487 rtx body = QUEUED_BODY (p);
488
2f937369
DM
489 switch (GET_CODE (body))
490 {
491 case INSN:
492 case JUMP_INSN:
493 case CALL_INSN:
494 case CODE_LABEL:
495 case BARRIER:
496 case NOTE:
497 QUEUED_INSN (p) = body;
498 emit_insn (body);
499 break;
500
501#ifdef ENABLE_CHECKING
502 case SEQUENCE:
503 abort ();
504 break;
505#endif
506
507 default:
508 QUEUED_INSN (p) = emit_insn (body);
509 break;
41b083c4 510 }
2f937369 511
1bbd65cd 512 QUEUED_BODY (p) = 0;
bbf6f052
RK
513 pending_chain = QUEUED_NEXT (p);
514 }
515}
1bbd65cd
EB
516
517/* Perform all the pending incrementations. */
518
519void
520emit_queue (void)
521{
522 emit_insns_enqueued_after_mark (NULL_RTX);
523}
bbf6f052
RK
524\f
525/* Copy data from FROM to TO, where the machine modes are not the same.
526 Both modes may be integer, or both may be floating.
527 UNSIGNEDP should be nonzero if FROM is an unsigned type.
528 This causes zero-extension instead of sign-extension. */
529
530void
502b8322 531convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
532{
533 enum machine_mode to_mode = GET_MODE (to);
534 enum machine_mode from_mode = GET_MODE (from);
535 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
536 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
537 enum insn_code code;
538 rtx libcall;
539
540 /* rtx code for making an equivalent value. */
37d0b254
SE
541 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
542 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052
RK
543
544 to = protect_from_queue (to, 1);
545 from = protect_from_queue (from, 0);
546
547 if (to_real != from_real)
548 abort ();
549
6de9cd9a
DN
550 /* If the source and destination are already the same, then there's
551 nothing to do. */
552 if (to == from)
553 return;
554
1499e0a8
RK
555 /* If FROM is a SUBREG that indicates that we have already done at least
556 the required extension, strip it. We don't handle such SUBREGs as
557 TO here. */
558
559 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
560 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
561 >= GET_MODE_SIZE (to_mode))
562 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
563 from = gen_lowpart (to_mode, from), from_mode = to_mode;
564
565 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
566 abort ();
567
bbf6f052
RK
568 if (to_mode == from_mode
569 || (from_mode == VOIDmode && CONSTANT_P (from)))
570 {
571 emit_move_insn (to, from);
572 return;
573 }
574
0b4565c9
BS
575 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
576 {
577 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
578 abort ();
3a94c984 579
0b4565c9 580 if (VECTOR_MODE_P (to_mode))
bafe341a 581 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 582 else
bafe341a 583 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
584
585 emit_move_insn (to, from);
586 return;
587 }
588
06765df1
R
589 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
590 {
591 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
592 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
593 return;
594 }
595
bbf6f052
RK
596 if (to_real)
597 {
642dfa8b 598 rtx value, insns;
85363ca0 599 convert_optab tab;
81d79e2c 600
e44846d6 601 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 602 tab = sext_optab;
e44846d6 603 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
85363ca0
ZW
604 tab = trunc_optab;
605 else
606 abort ();
2b01c326 607
85363ca0 608 /* Try converting directly if the insn is supported. */
2b01c326 609
85363ca0
ZW
610 code = tab->handlers[to_mode][from_mode].insn_code;
611 if (code != CODE_FOR_nothing)
b092b471 612 {
85363ca0
ZW
613 emit_unop_insn (code, to, from,
614 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
615 return;
616 }
b092b471 617
85363ca0
ZW
618 /* Otherwise use a libcall. */
619 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 620
85363ca0 621 if (!libcall)
b092b471 622 /* This conversion is not implemented yet. */
bbf6f052
RK
623 abort ();
624
642dfa8b 625 start_sequence ();
ebb1b59a 626 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 627 1, from, from_mode);
642dfa8b
BS
628 insns = get_insns ();
629 end_sequence ();
450b1728
EC
630 emit_libcall_block (insns, to, value,
631 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
632 from)
633 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
634 return;
635 }
636
85363ca0
ZW
637 /* Handle pointer conversion. */ /* SPEE 900220. */
638 /* Targets are expected to provide conversion insns between PxImode and
639 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
640 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
641 {
642 enum machine_mode full_mode
643 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
644
645 if (trunc_optab->handlers[to_mode][full_mode].insn_code
646 == CODE_FOR_nothing)
647 abort ();
648
649 if (full_mode != from_mode)
650 from = convert_to_mode (full_mode, from, unsignedp);
651 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
652 to, from, UNKNOWN);
653 return;
654 }
655 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
656 {
657 enum machine_mode full_mode
658 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
659
660 if (sext_optab->handlers[full_mode][from_mode].insn_code
661 == CODE_FOR_nothing)
662 abort ();
663
664 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
665 to, from, UNKNOWN);
666 if (to_mode == full_mode)
667 return;
668
a1105617 669 /* else proceed to integer conversions below. */
85363ca0
ZW
670 from_mode = full_mode;
671 }
672
bbf6f052
RK
673 /* Now both modes are integers. */
674
675 /* Handle expanding beyond a word. */
676 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
677 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
678 {
679 rtx insns;
680 rtx lowpart;
681 rtx fill_value;
682 rtx lowfrom;
683 int i;
684 enum machine_mode lowpart_mode;
685 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
686
687 /* Try converting directly if the insn is supported. */
688 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
689 != CODE_FOR_nothing)
690 {
cd1b4b44
RK
691 /* If FROM is a SUBREG, put it into a register. Do this
692 so that we always generate the same set of insns for
693 better cse'ing; if an intermediate assignment occurred,
694 we won't be doing the operation directly on the SUBREG. */
695 if (optimize > 0 && GET_CODE (from) == SUBREG)
696 from = force_reg (from_mode, from);
bbf6f052
RK
697 emit_unop_insn (code, to, from, equiv_code);
698 return;
699 }
700 /* Next, try converting via full word. */
701 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
702 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
703 != CODE_FOR_nothing))
704 {
f8cfc6aa 705 if (REG_P (to))
6a2d136b
EB
706 {
707 if (reg_overlap_mentioned_p (to, from))
708 from = force_reg (from_mode, from);
709 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
710 }
bbf6f052
RK
711 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
712 emit_unop_insn (code, to,
713 gen_lowpart (word_mode, to), equiv_code);
714 return;
715 }
716
717 /* No special multiword conversion insn; do it by hand. */
718 start_sequence ();
719
5c5033c3
RK
720 /* Since we will turn this into a no conflict block, we must ensure
721 that the source does not overlap the target. */
722
723 if (reg_overlap_mentioned_p (to, from))
724 from = force_reg (from_mode, from);
725
bbf6f052
RK
726 /* Get a copy of FROM widened to a word, if necessary. */
727 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
728 lowpart_mode = word_mode;
729 else
730 lowpart_mode = from_mode;
731
732 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
733
734 lowpart = gen_lowpart (lowpart_mode, to);
735 emit_move_insn (lowpart, lowfrom);
736
737 /* Compute the value to put in each remaining word. */
738 if (unsignedp)
739 fill_value = const0_rtx;
740 else
741 {
742#ifdef HAVE_slt
743 if (HAVE_slt
a995e389 744 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
745 && STORE_FLAG_VALUE == -1)
746 {
906c4e36 747 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 748 lowpart_mode, 0);
bbf6f052
RK
749 fill_value = gen_reg_rtx (word_mode);
750 emit_insn (gen_slt (fill_value));
751 }
752 else
753#endif
754 {
755 fill_value
756 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
757 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 758 NULL_RTX, 0);
bbf6f052
RK
759 fill_value = convert_to_mode (word_mode, fill_value, 1);
760 }
761 }
762
763 /* Fill the remaining words. */
764 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
765 {
766 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
767 rtx subword = operand_subword (to, index, 1, to_mode);
768
769 if (subword == 0)
770 abort ();
771
772 if (fill_value != subword)
773 emit_move_insn (subword, fill_value);
774 }
775
776 insns = get_insns ();
777 end_sequence ();
778
906c4e36 779 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 780 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
781 return;
782 }
783
d3c64ee3
RS
784 /* Truncating multi-word to a word or less. */
785 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
786 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 787 {
3c0cb5de 788 if (!((MEM_P (from)
431a6eca
JW
789 && ! MEM_VOLATILE_P (from)
790 && direct_load[(int) to_mode]
791 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 792 || REG_P (from)
431a6eca
JW
793 || GET_CODE (from) == SUBREG))
794 from = force_reg (from_mode, from);
bbf6f052
RK
795 convert_move (to, gen_lowpart (word_mode, from), 0);
796 return;
797 }
798
bbf6f052
RK
799 /* Now follow all the conversions between integers
800 no more than a word long. */
801
802 /* For truncation, usually we can just refer to FROM in a narrower mode. */
803 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
804 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 805 GET_MODE_BITSIZE (from_mode)))
bbf6f052 806 {
3c0cb5de 807 if (!((MEM_P (from)
d3c64ee3
RS
808 && ! MEM_VOLATILE_P (from)
809 && direct_load[(int) to_mode]
810 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 811 || REG_P (from)
d3c64ee3
RS
812 || GET_CODE (from) == SUBREG))
813 from = force_reg (from_mode, from);
f8cfc6aa 814 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
34aa3599
RK
815 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
816 from = copy_to_reg (from);
bbf6f052
RK
817 emit_move_insn (to, gen_lowpart (to_mode, from));
818 return;
819 }
820
d3c64ee3 821 /* Handle extension. */
bbf6f052
RK
822 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
823 {
824 /* Convert directly if that works. */
825 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 != CODE_FOR_nothing)
827 {
9413de45
RK
828 if (flag_force_mem)
829 from = force_not_mem (from);
830
bbf6f052
RK
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
833 }
834 else
835 {
836 enum machine_mode intermediate;
2b28d92e
NC
837 rtx tmp;
838 tree shift_amount;
bbf6f052
RK
839
840 /* Search for a mode to convert via. */
841 for (intermediate = from_mode; intermediate != VOIDmode;
842 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
843 if (((can_extend_p (to_mode, intermediate, unsignedp)
844 != CODE_FOR_nothing)
845 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
846 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
847 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
848 && (can_extend_p (intermediate, from_mode, unsignedp)
849 != CODE_FOR_nothing))
850 {
851 convert_move (to, convert_to_mode (intermediate, from,
852 unsignedp), unsignedp);
853 return;
854 }
855
2b28d92e 856 /* No suitable intermediate mode.
3a94c984 857 Generate what we need with shifts. */
2b28d92e
NC
858 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
859 - GET_MODE_BITSIZE (from_mode), 0);
860 from = gen_lowpart (to_mode, force_reg (from_mode, from));
861 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
862 to, unsignedp);
3a94c984 863 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
864 to, unsignedp);
865 if (tmp != to)
866 emit_move_insn (to, tmp);
867 return;
bbf6f052
RK
868 }
869 }
870
3a94c984 871 /* Support special truncate insns for certain modes. */
85363ca0 872 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 873 {
85363ca0
ZW
874 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
875 to, from, UNKNOWN);
b9bcad65
RK
876 return;
877 }
878
bbf6f052
RK
879 /* Handle truncation of volatile memrefs, and so on;
880 the things that couldn't be truncated directly,
85363ca0
ZW
881 and for which there was no special instruction.
882
883 ??? Code above formerly short-circuited this, for most integer
884 mode pairs, with a force_reg in from_mode followed by a recursive
885 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
886 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
887 {
888 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
889 emit_move_insn (to, temp);
890 return;
891 }
892
893 /* Mode combination is not recognized. */
894 abort ();
895}
896
897/* Return an rtx for a value that would result
898 from converting X to mode MODE.
899 Both X and MODE may be floating, or both integer.
900 UNSIGNEDP is nonzero if X is an unsigned value.
901 This can be done by referring to a part of X in place
5d901c31
RS
902 or by copying to a new temporary with conversion.
903
904 This function *must not* call protect_from_queue
905 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
906
907rtx
502b8322 908convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
909{
910 return convert_modes (mode, VOIDmode, x, unsignedp);
911}
912
913/* Return an rtx for a value that would result
914 from converting X from mode OLDMODE to mode MODE.
915 Both modes may be floating, or both integer.
916 UNSIGNEDP is nonzero if X is an unsigned value.
917
918 This can be done by referring to a part of X in place
919 or by copying to a new temporary with conversion.
920
921 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
922
923 This function *must not* call protect_from_queue
924 except when putting X into an insn (in which case convert_move does it). */
925
926rtx
502b8322 927convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 928{
b3694847 929 rtx temp;
5ffe63ed 930
1499e0a8
RK
931 /* If FROM is a SUBREG that indicates that we have already done at least
932 the required extension, strip it. */
933
934 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
935 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
936 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
937 x = gen_lowpart (mode, x);
bbf6f052 938
64791b18
RK
939 if (GET_MODE (x) != VOIDmode)
940 oldmode = GET_MODE (x);
3a94c984 941
5ffe63ed 942 if (mode == oldmode)
bbf6f052
RK
943 return x;
944
945 /* There is one case that we must handle specially: If we are converting
906c4e36 946 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
947 we are to interpret the constant as unsigned, gen_lowpart will do
948 the wrong if the constant appears negative. What we want to do is
949 make the high-order word of the constant zero, not all ones. */
950
951 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 952 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 953 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
954 {
955 HOST_WIDE_INT val = INTVAL (x);
956
957 if (oldmode != VOIDmode
958 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
959 {
960 int width = GET_MODE_BITSIZE (oldmode);
961
962 /* We need to zero extend VAL. */
963 val &= ((HOST_WIDE_INT) 1 << width) - 1;
964 }
965
966 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
967 }
bbf6f052
RK
968
969 /* We can do this with a gen_lowpart if both desired and current modes
970 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
971 non-volatile MEM. Except for the constant case where MODE is no
972 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 973
ba2e110c
RK
974 if ((GET_CODE (x) == CONST_INT
975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 976 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 977 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 978 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 979 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
3c0cb5de 980 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
d57c66da 981 && direct_load[(int) mode])
f8cfc6aa 982 || (REG_P (x)
006c9f4a
SE
983 && (! HARD_REGISTER_P (x)
984 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
985 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
986 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
987 {
988 /* ?? If we don't know OLDMODE, we have to assume here that
989 X does not need sign- or zero-extension. This may not be
990 the case, but it's the best we can do. */
991 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
992 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
993 {
994 HOST_WIDE_INT val = INTVAL (x);
995 int width = GET_MODE_BITSIZE (oldmode);
996
997 /* We must sign or zero-extend in this case. Start by
998 zero-extending, then sign extend if we need to. */
999 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1000 if (! unsignedp
1001 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1002 val |= (HOST_WIDE_INT) (-1) << width;
1003
2496c7bd 1004 return gen_int_mode (val, mode);
ba2e110c
RK
1005 }
1006
1007 return gen_lowpart (mode, x);
1008 }
bbf6f052 1009
ebe75517
JH
1010 /* Converting from integer constant into mode is always equivalent to an
1011 subreg operation. */
1012 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1013 {
1014 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1015 abort ();
1016 return simplify_gen_subreg (mode, x, oldmode, 0);
1017 }
1018
bbf6f052
RK
1019 temp = gen_reg_rtx (mode);
1020 convert_move (temp, x, unsignedp);
1021 return temp;
1022}
1023\f
cf5124f6
RS
1024/* STORE_MAX_PIECES is the number of bytes at a time that we can
1025 store efficiently. Due to internal GCC limitations, this is
1026 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1027 for an immediate constant. */
1028
1029#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1030
8fd3cf4e
JJ
1031/* Determine whether the LEN bytes can be moved by using several move
1032 instructions. Return nonzero if a call to move_by_pieces should
1033 succeed. */
1034
1035int
502b8322
AJ
1036can_move_by_pieces (unsigned HOST_WIDE_INT len,
1037 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
1038{
1039 return MOVE_BY_PIECES_P (len, align);
1040}
1041
21d93687
RK
1042/* Generate several move instructions to copy LEN bytes from block FROM to
1043 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1044 and TO through protect_from_queue before calling.
566aa174 1045
21d93687
RK
1046 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1047 used to push FROM to the stack.
566aa174 1048
8fd3cf4e 1049 ALIGN is maximum stack alignment we can assume.
bbf6f052 1050
8fd3cf4e
JJ
1051 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1052 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1053 stpcpy. */
1054
1055rtx
502b8322
AJ
1056move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1057 unsigned int align, int endp)
bbf6f052
RK
1058{
1059 struct move_by_pieces data;
566aa174 1060 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 1061 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
1062 enum machine_mode mode = VOIDmode, tmode;
1063 enum insn_code icode;
bbf6f052 1064
f26aca6d
DD
1065 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1066
bbf6f052 1067 data.offset = 0;
bbf6f052 1068 data.from_addr = from_addr;
566aa174
JH
1069 if (to)
1070 {
1071 to_addr = XEXP (to, 0);
1072 data.to = to;
1073 data.autinc_to
1074 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1075 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1076 data.reverse
1077 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1078 }
1079 else
1080 {
1081 to_addr = NULL_RTX;
1082 data.to = NULL_RTX;
1083 data.autinc_to = 1;
1084#ifdef STACK_GROWS_DOWNWARD
1085 data.reverse = 1;
1086#else
1087 data.reverse = 0;
1088#endif
1089 }
1090 data.to_addr = to_addr;
bbf6f052 1091 data.from = from;
bbf6f052
RK
1092 data.autinc_from
1093 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1094 || GET_CODE (from_addr) == POST_INC
1095 || GET_CODE (from_addr) == POST_DEC);
1096
1097 data.explicit_inc_from = 0;
1098 data.explicit_inc_to = 0;
bbf6f052
RK
1099 if (data.reverse) data.offset = len;
1100 data.len = len;
1101
1102 /* If copying requires more than two move insns,
1103 copy addresses to registers (to make displacements shorter)
1104 and use post-increment if available. */
1105 if (!(data.autinc_from && data.autinc_to)
1106 && move_by_pieces_ninsns (len, align) > 2)
1107 {
3a94c984 1108 /* Find the mode of the largest move... */
fbe1758d
AM
1109 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1110 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1111 if (GET_MODE_SIZE (tmode) < max_size)
1112 mode = tmode;
1113
1114 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
1115 {
1116 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1117 data.autinc_from = 1;
1118 data.explicit_inc_from = -1;
1119 }
fbe1758d 1120 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
1121 {
1122 data.from_addr = copy_addr_to_reg (from_addr);
1123 data.autinc_from = 1;
1124 data.explicit_inc_from = 1;
1125 }
bbf6f052
RK
1126 if (!data.autinc_from && CONSTANT_P (from_addr))
1127 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 1128 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
1129 {
1130 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1131 data.autinc_to = 1;
1132 data.explicit_inc_to = -1;
1133 }
fbe1758d 1134 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
1135 {
1136 data.to_addr = copy_addr_to_reg (to_addr);
1137 data.autinc_to = 1;
1138 data.explicit_inc_to = 1;
1139 }
bbf6f052
RK
1140 if (!data.autinc_to && CONSTANT_P (to_addr))
1141 data.to_addr = copy_addr_to_reg (to_addr);
1142 }
1143
e1565e65 1144 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751
RK
1145 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1146 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1147
1148 /* First move what we can in the largest integer mode, then go to
1149 successively smaller modes. */
1150
1151 while (max_size > 1)
1152 {
e7c33f54
RK
1153 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1154 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1155 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1156 mode = tmode;
1157
1158 if (mode == VOIDmode)
1159 break;
1160
1161 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1162 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1163 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1164
1165 max_size = GET_MODE_SIZE (mode);
1166 }
1167
1168 /* The code above should have handled everything. */
2a8e278c 1169 if (data.len > 0)
bbf6f052 1170 abort ();
8fd3cf4e
JJ
1171
1172 if (endp)
1173 {
1174 rtx to1;
1175
1176 if (data.reverse)
1177 abort ();
1178 if (data.autinc_to)
1179 {
1180 if (endp == 2)
1181 {
1182 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1183 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1184 else
1185 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1186 -1));
1187 }
1188 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1189 data.offset);
1190 }
1191 else
1192 {
1193 if (endp == 2)
1194 --data.offset;
1195 to1 = adjust_address (data.to, QImode, data.offset);
1196 }
1197 return to1;
1198 }
1199 else
1200 return data.to;
bbf6f052
RK
1201}
1202
1203/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1204 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1205
3bdf5ad1 1206static unsigned HOST_WIDE_INT
502b8322 1207move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
bbf6f052 1208{
3bdf5ad1
RK
1209 unsigned HOST_WIDE_INT n_insns = 0;
1210 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
bbf6f052 1211
e1565e65 1212 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 1213 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
14c78e9b 1214 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1215
1216 while (max_size > 1)
1217 {
1218 enum machine_mode mode = VOIDmode, tmode;
1219 enum insn_code icode;
1220
e7c33f54
RK
1221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1222 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1223 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1224 mode = tmode;
1225
1226 if (mode == VOIDmode)
1227 break;
1228
1229 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1230 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1231 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1232
1233 max_size = GET_MODE_SIZE (mode);
1234 }
1235
13c6f0d5
NS
1236 if (l)
1237 abort ();
bbf6f052
RK
1238 return n_insns;
1239}
1240
1241/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1242 with move instructions for mode MODE. GENFUN is the gen_... function
1243 to make a move insn for that mode. DATA has all the other info. */
1244
1245static void
502b8322
AJ
1246move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1247 struct move_by_pieces *data)
bbf6f052 1248{
3bdf5ad1 1249 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1250 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1251
1252 while (data->len >= size)
1253 {
3bdf5ad1
RK
1254 if (data->reverse)
1255 data->offset -= size;
1256
566aa174 1257 if (data->to)
3bdf5ad1 1258 {
566aa174 1259 if (data->autinc_to)
630036c6
JJ
1260 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1261 data->offset);
566aa174 1262 else
f4ef873c 1263 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1264 }
3bdf5ad1
RK
1265
1266 if (data->autinc_from)
630036c6
JJ
1267 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1268 data->offset);
3bdf5ad1 1269 else
f4ef873c 1270 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1271
940da324 1272 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1273 emit_insn (gen_add2_insn (data->to_addr,
1274 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1275 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1276 emit_insn (gen_add2_insn (data->from_addr,
1277 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1278
566aa174
JH
1279 if (data->to)
1280 emit_insn ((*genfun) (to1, from1));
1281 else
21d93687
RK
1282 {
1283#ifdef PUSH_ROUNDING
1284 emit_single_push_insn (mode, from1, NULL);
1285#else
1286 abort ();
1287#endif
1288 }
3bdf5ad1 1289
940da324 1290 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1291 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1292 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1293 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1294
3bdf5ad1
RK
1295 if (! data->reverse)
1296 data->offset += size;
bbf6f052
RK
1297
1298 data->len -= size;
1299 }
1300}
1301\f
4ca79136
RH
1302/* Emit code to move a block Y to a block X. This may be done with
1303 string-move instructions, with multiple scalar move instructions,
1304 or with a library call.
bbf6f052 1305
4ca79136 1306 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1307 SIZE is an rtx that says how long they are.
19caa751 1308 ALIGN is the maximum alignment we can assume they have.
44bb111a 1309 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1310
e9a25f70
JL
1311 Return the address of the new block, if memcpy is called and returns it,
1312 0 otherwise. */
1313
1314rtx
502b8322 1315emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1316{
44bb111a 1317 bool may_use_call;
e9a25f70 1318 rtx retval = 0;
44bb111a
RH
1319 unsigned int align;
1320
1321 switch (method)
1322 {
1323 case BLOCK_OP_NORMAL:
1324 may_use_call = true;
1325 break;
1326
1327 case BLOCK_OP_CALL_PARM:
1328 may_use_call = block_move_libcall_safe_for_call_parm ();
1329
1330 /* Make inhibit_defer_pop nonzero around the library call
1331 to force it to pop the arguments right away. */
1332 NO_DEFER_POP;
1333 break;
1334
1335 case BLOCK_OP_NO_LIBCALL:
1336 may_use_call = false;
1337 break;
1338
1339 default:
1340 abort ();
1341 }
1342
1343 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1344
bbf6f052
RK
1345 x = protect_from_queue (x, 1);
1346 y = protect_from_queue (y, 0);
5d901c31 1347 size = protect_from_queue (size, 0);
bbf6f052 1348
3c0cb5de 1349 if (!MEM_P (x))
bbf6f052 1350 abort ();
3c0cb5de 1351 if (!MEM_P (y))
bbf6f052
RK
1352 abort ();
1353 if (size == 0)
1354 abort ();
1355
82c82743
RH
1356 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1357 block copy is more efficient for other large modes, e.g. DCmode. */
1358 x = adjust_address (x, BLKmode, 0);
1359 y = adjust_address (y, BLKmode, 0);
1360
cb38fd88
RH
1361 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1362 can be incorrect is coming from __builtin_memcpy. */
1363 if (GET_CODE (size) == CONST_INT)
1364 {
6972c506
JJ
1365 if (INTVAL (size) == 0)
1366 return 0;
1367
cb38fd88
RH
1368 x = shallow_copy_rtx (x);
1369 y = shallow_copy_rtx (y);
1370 set_mem_size (x, size);
1371 set_mem_size (y, size);
1372 }
1373
fbe1758d 1374 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1375 move_by_pieces (x, y, INTVAL (size), align, 0);
70128ad9 1376 else if (emit_block_move_via_movmem (x, y, size, align))
4ca79136 1377 ;
44bb111a 1378 else if (may_use_call)
4ca79136 1379 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1380 else
1381 emit_block_move_via_loop (x, y, size, align);
1382
1383 if (method == BLOCK_OP_CALL_PARM)
1384 OK_DEFER_POP;
266007a7 1385
4ca79136
RH
1386 return retval;
1387}
266007a7 1388
502b8322 1389/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1390 block move libcall will not clobber any parameters which may have
1391 already been placed on the stack. */
1392
1393static bool
502b8322 1394block_move_libcall_safe_for_call_parm (void)
44bb111a 1395{
a357a6d4 1396 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1397 if (PUSH_ARGS)
1398 return true;
44bb111a 1399
450b1728 1400 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1401 an outgoing argument. */
1402#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1403 {
1404 tree fn = emit_block_move_libcall_fn (false);
1405 (void) fn;
1406 if (REG_PARM_STACK_SPACE (fn) != 0)
1407 return false;
1408 }
44bb111a 1409#endif
44bb111a 1410
a357a6d4
GK
1411 /* If any argument goes in memory, then it might clobber an outgoing
1412 argument. */
1413 {
1414 CUMULATIVE_ARGS args_so_far;
1415 tree fn, arg;
450b1728 1416
a357a6d4 1417 fn = emit_block_move_libcall_fn (false);
0f6937fe 1418 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1419
a357a6d4
GK
1420 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1421 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1422 {
1423 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1424 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1425 if (!tmp || !REG_P (tmp))
44bb111a 1426 return false;
a357a6d4
GK
1427 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1428 NULL_TREE, 1))
1429 return false;
a357a6d4
GK
1430 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1431 }
1432 }
1433 return true;
44bb111a
RH
1434}
1435
70128ad9 1436/* A subroutine of emit_block_move. Expand a movmem pattern;
4ca79136 1437 return true if successful. */
3ef1eef4 1438
4ca79136 1439static bool
70128ad9 1440emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1441{
4ca79136 1442 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1443 int save_volatile_ok = volatile_ok;
4ca79136 1444 enum machine_mode mode;
266007a7 1445
4ca79136
RH
1446 /* Since this is a move insn, we don't care about volatility. */
1447 volatile_ok = 1;
1448
ee960939
OH
1449 /* Try the most limited insn first, because there's no point
1450 including more than one in the machine description unless
1451 the more limited one has some advantage. */
1452
4ca79136
RH
1453 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1454 mode = GET_MODE_WIDER_MODE (mode))
1455 {
70128ad9 1456 enum insn_code code = movmem_optab[(int) mode];
4ca79136
RH
1457 insn_operand_predicate_fn pred;
1458
1459 if (code != CODE_FOR_nothing
1460 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1461 here because if SIZE is less than the mode mask, as it is
1462 returned by the macro, it will definitely be less than the
1463 actual mode mask. */
1464 && ((GET_CODE (size) == CONST_INT
1465 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1466 <= (GET_MODE_MASK (mode) >> 1)))
1467 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1468 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1469 || (*pred) (x, BLKmode))
1470 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1471 || (*pred) (y, BLKmode))
1472 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1473 || (*pred) (opalign, VOIDmode)))
1474 {
1475 rtx op2;
1476 rtx last = get_last_insn ();
1477 rtx pat;
1478
1479 op2 = convert_to_mode (mode, size, 1);
1480 pred = insn_data[(int) code].operand[2].predicate;
1481 if (pred != 0 && ! (*pred) (op2, mode))
1482 op2 = copy_to_mode_reg (mode, op2);
1483
1484 /* ??? When called via emit_block_move_for_call, it'd be
1485 nice if there were some way to inform the backend, so
1486 that it doesn't fail the expansion because it thinks
1487 emitting the libcall would be more efficient. */
1488
1489 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1490 if (pat)
1491 {
1492 emit_insn (pat);
a5e9c810 1493 volatile_ok = save_volatile_ok;
4ca79136 1494 return true;
bbf6f052 1495 }
4ca79136
RH
1496 else
1497 delete_insns_since (last);
bbf6f052 1498 }
4ca79136 1499 }
bbf6f052 1500
a5e9c810 1501 volatile_ok = save_volatile_ok;
4ca79136
RH
1502 return false;
1503}
3ef1eef4 1504
8f99553f 1505/* A subroutine of emit_block_move. Expand a call to memcpy.
4ca79136 1506 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1507
4ca79136 1508static rtx
502b8322 1509emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1510{
ee960939 1511 rtx dst_addr, src_addr;
4ca79136
RH
1512 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1513 enum machine_mode size_mode;
1514 rtx retval;
4bc973ae 1515
4ca79136 1516 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1517
ee960939
OH
1518 It is unsafe to save the value generated by protect_from_queue and reuse
1519 it later. Consider what happens if emit_queue is called before the
1520 return value from protect_from_queue is used.
4bc973ae 1521
ee960939
OH
1522 Expansion of the CALL_EXPR below will call emit_queue before we are
1523 finished emitting RTL for argument setup. So if we are not careful we
1524 could get the wrong value for an argument.
4bc973ae 1525
ee960939 1526 To avoid this problem we go ahead and emit code to copy the addresses of
4dfa0342 1527 DST and SRC and SIZE into new pseudos.
4bc973ae 1528
ee960939
OH
1529 Note this is not strictly needed for library calls since they do not call
1530 emit_queue before loading their arguments. However, we may need to have
1531 library calls call emit_queue in the future since failing to do so could
1532 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1533 arguments in registers. */
1534
1535 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1536 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1537
ee960939
OH
1538 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1539 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1540
1541 dst_tree = make_tree (ptr_type_node, dst_addr);
1542 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136 1543
8f99553f 1544 size_mode = TYPE_MODE (sizetype);
ee960939 1545
4ca79136
RH
1546 size = convert_to_mode (size_mode, size, 1);
1547 size = copy_to_mode_reg (size_mode, size);
1548
1549 /* It is incorrect to use the libcall calling conventions to call
1550 memcpy in this context. This could be a user call to memcpy and
1551 the user may wish to examine the return value from memcpy. For
1552 targets where libcalls and normal calls have different conventions
8f99553f 1553 for returning pointers, we could end up generating incorrect code. */
4ca79136 1554
8f99553f 1555 size_tree = make_tree (sizetype, size);
4ca79136
RH
1556
1557 fn = emit_block_move_libcall_fn (true);
1558 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f
JM
1559 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1560 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
4ca79136
RH
1561
1562 /* Now we have to build up the CALL_EXPR itself. */
1563 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1564 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1565 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1566
1567 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1568
ee960939
OH
1569 /* If we are initializing a readonly value, show the above call clobbered
1570 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1571 the delay slot scheduler might overlook conflicts and take nasty
1572 decisions. */
4ca79136 1573 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1574 add_function_usage_to
1575 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1576 gen_rtx_CLOBBER (VOIDmode, dst),
1577 NULL_RTX));
4ca79136 1578
8f99553f 1579 return retval;
4ca79136 1580}
52cf7115 1581
4ca79136
RH
1582/* A subroutine of emit_block_move_via_libcall. Create the tree node
1583 for the function we use for block copies. The first time FOR_CALL
1584 is true, we call assemble_external. */
52cf7115 1585
4ca79136
RH
1586static GTY(()) tree block_move_fn;
1587
9661b15f 1588void
502b8322 1589init_block_move_fn (const char *asmspec)
4ca79136 1590{
9661b15f 1591 if (!block_move_fn)
4ca79136 1592 {
8fd3cf4e 1593 tree args, fn;
9661b15f 1594
8f99553f
JM
1595 fn = get_identifier ("memcpy");
1596 args = build_function_type_list (ptr_type_node, ptr_type_node,
1597 const_ptr_type_node, sizetype,
1598 NULL_TREE);
52cf7115 1599
4ca79136
RH
1600 fn = build_decl (FUNCTION_DECL, fn, args);
1601 DECL_EXTERNAL (fn) = 1;
1602 TREE_PUBLIC (fn) = 1;
1603 DECL_ARTIFICIAL (fn) = 1;
1604 TREE_NOTHROW (fn) = 1;
66c60e67 1605
4ca79136 1606 block_move_fn = fn;
bbf6f052 1607 }
e9a25f70 1608
9661b15f
JJ
1609 if (asmspec)
1610 {
1611 SET_DECL_RTL (block_move_fn, NULL_RTX);
1612 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1613 }
1614}
1615
1616static tree
502b8322 1617emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1618{
1619 static bool emitted_extern;
1620
1621 if (!block_move_fn)
1622 init_block_move_fn (NULL);
1623
4ca79136
RH
1624 if (for_call && !emitted_extern)
1625 {
1626 emitted_extern = true;
9661b15f
JJ
1627 make_decl_rtl (block_move_fn, NULL);
1628 assemble_external (block_move_fn);
4ca79136
RH
1629 }
1630
9661b15f 1631 return block_move_fn;
bbf6f052 1632}
44bb111a
RH
1633
1634/* A subroutine of emit_block_move. Copy the data via an explicit
1635 loop. This is used only when libcalls are forbidden. */
1636/* ??? It'd be nice to copy in hunks larger than QImode. */
1637
1638static void
502b8322
AJ
1639emit_block_move_via_loop (rtx x, rtx y, rtx size,
1640 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1641{
1642 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1643 enum machine_mode iter_mode;
1644
1645 iter_mode = GET_MODE (size);
1646 if (iter_mode == VOIDmode)
1647 iter_mode = word_mode;
1648
1649 top_label = gen_label_rtx ();
1650 cmp_label = gen_label_rtx ();
1651 iter = gen_reg_rtx (iter_mode);
1652
1653 emit_move_insn (iter, const0_rtx);
1654
1655 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1656 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1657 do_pending_stack_adjust ();
1658
44bb111a
RH
1659 emit_jump (cmp_label);
1660 emit_label (top_label);
1661
1662 tmp = convert_modes (Pmode, iter_mode, iter, true);
1663 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1664 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1665 x = change_address (x, QImode, x_addr);
1666 y = change_address (y, QImode, y_addr);
1667
1668 emit_move_insn (x, y);
1669
1670 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1671 true, OPTAB_LIB_WIDEN);
1672 if (tmp != iter)
1673 emit_move_insn (iter, tmp);
1674
44bb111a
RH
1675 emit_label (cmp_label);
1676
1677 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1678 true, top_label);
44bb111a 1679}
bbf6f052
RK
1680\f
1681/* Copy all or part of a value X into registers starting at REGNO.
1682 The number of registers to be filled is NREGS. */
1683
1684void
502b8322 1685move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1686{
1687 int i;
381127e8 1688#ifdef HAVE_load_multiple
3a94c984 1689 rtx pat;
381127e8
RL
1690 rtx last;
1691#endif
bbf6f052 1692
72bb9717
RK
1693 if (nregs == 0)
1694 return;
1695
bbf6f052
RK
1696 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1697 x = validize_mem (force_const_mem (mode, x));
1698
1699 /* See if the machine can do this with a load multiple insn. */
1700#ifdef HAVE_load_multiple
c3a02afe 1701 if (HAVE_load_multiple)
bbf6f052 1702 {
c3a02afe 1703 last = get_last_insn ();
38a448ca 1704 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1705 GEN_INT (nregs));
1706 if (pat)
1707 {
1708 emit_insn (pat);
1709 return;
1710 }
1711 else
1712 delete_insns_since (last);
bbf6f052 1713 }
bbf6f052
RK
1714#endif
1715
1716 for (i = 0; i < nregs; i++)
38a448ca 1717 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1718 operand_subword_force (x, i, mode));
1719}
1720
1721/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1722 The number of registers to be filled is NREGS. */
0040593d 1723
bbf6f052 1724void
502b8322 1725move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1726{
1727 int i;
bbf6f052 1728
2954d7db
RK
1729 if (nregs == 0)
1730 return;
1731
bbf6f052
RK
1732 /* See if the machine can do this with a store multiple insn. */
1733#ifdef HAVE_store_multiple
c3a02afe 1734 if (HAVE_store_multiple)
bbf6f052 1735 {
c6b97fac
AM
1736 rtx last = get_last_insn ();
1737 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1738 GEN_INT (nregs));
c3a02afe
RK
1739 if (pat)
1740 {
1741 emit_insn (pat);
1742 return;
1743 }
1744 else
1745 delete_insns_since (last);
bbf6f052 1746 }
bbf6f052
RK
1747#endif
1748
1749 for (i = 0; i < nregs; i++)
1750 {
1751 rtx tem = operand_subword (x, i, 1, BLKmode);
1752
1753 if (tem == 0)
1754 abort ();
1755
38a448ca 1756 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1757 }
1758}
1759
084a1106
JDA
1760/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1761 ORIG, where ORIG is a non-consecutive group of registers represented by
1762 a PARALLEL. The clone is identical to the original except in that the
1763 original set of registers is replaced by a new set of pseudo registers.
1764 The new set has the same modes as the original set. */
1765
1766rtx
502b8322 1767gen_group_rtx (rtx orig)
084a1106
JDA
1768{
1769 int i, length;
1770 rtx *tmps;
1771
1772 if (GET_CODE (orig) != PARALLEL)
1773 abort ();
1774
1775 length = XVECLEN (orig, 0);
703ad42b 1776 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1777
1778 /* Skip a NULL entry in first slot. */
1779 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1780
1781 if (i)
1782 tmps[0] = 0;
1783
1784 for (; i < length; i++)
1785 {
1786 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1787 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1788
1789 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1790 }
1791
1792 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1793}
1794
6e985040
AM
1795/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1796 where DST is non-consecutive registers represented by a PARALLEL.
1797 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1798 if not known. */
fffa9c1d
JW
1799
1800void
6e985040 1801emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1802{
aac5cc16
RH
1803 rtx *tmps, src;
1804 int start, i;
fffa9c1d 1805
aac5cc16 1806 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1807 abort ();
1808
1809 /* Check for a NULL entry, used to indicate that the parameter goes
1810 both on the stack and in registers. */
aac5cc16
RH
1811 if (XEXP (XVECEXP (dst, 0, 0), 0))
1812 start = 0;
fffa9c1d 1813 else
aac5cc16
RH
1814 start = 1;
1815
703ad42b 1816 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1817
aac5cc16
RH
1818 /* Process the pieces. */
1819 for (i = start; i < XVECLEN (dst, 0); i++)
1820 {
1821 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1822 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1823 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1824 int shift = 0;
1825
1826 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1827 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1828 {
6e985040
AM
1829 /* Arrange to shift the fragment to where it belongs.
1830 extract_bit_field loads to the lsb of the reg. */
1831 if (
1832#ifdef BLOCK_REG_PADDING
1833 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1834 == (BYTES_BIG_ENDIAN ? upward : downward)
1835#else
1836 BYTES_BIG_ENDIAN
1837#endif
1838 )
1839 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1840 bytelen = ssize - bytepos;
1841 if (bytelen <= 0)
729a2125 1842 abort ();
aac5cc16
RH
1843 }
1844
f3ce87a9
DE
1845 /* If we won't be loading directly from memory, protect the real source
1846 from strange tricks we might play; but make sure that the source can
1847 be loaded directly into the destination. */
1848 src = orig_src;
3c0cb5de 1849 if (!MEM_P (orig_src)
f3ce87a9
DE
1850 && (!CONSTANT_P (orig_src)
1851 || (GET_MODE (orig_src) != mode
1852 && GET_MODE (orig_src) != VOIDmode)))
1853 {
1854 if (GET_MODE (orig_src) == VOIDmode)
1855 src = gen_reg_rtx (mode);
1856 else
1857 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1858
f3ce87a9
DE
1859 emit_move_insn (src, orig_src);
1860 }
1861
aac5cc16 1862 /* Optimize the access just a bit. */
3c0cb5de 1863 if (MEM_P (src)
6e985040
AM
1864 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1865 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1866 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1867 && bytelen == GET_MODE_SIZE (mode))
1868 {
1869 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1870 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1871 }
7c4a6db0
JW
1872 else if (GET_CODE (src) == CONCAT)
1873 {
015b1ad1
JDA
1874 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1875 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1876
1877 if ((bytepos == 0 && bytelen == slen0)
1878 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1879 {
015b1ad1
JDA
1880 /* The following assumes that the concatenated objects all
1881 have the same size. In this case, a simple calculation
1882 can be used to determine the object and the bit field
1883 to be extracted. */
1884 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1885 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1886 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1887 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1888 (bytepos % slen0) * BITS_PER_UNIT,
1889 1, NULL_RTX, mode, mode, ssize);
cbb92744 1890 }
58f69841
JH
1891 else if (bytepos == 0)
1892 {
015b1ad1 1893 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1894 emit_move_insn (mem, src);
04050c69 1895 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1896 }
7c4a6db0
JW
1897 else
1898 abort ();
1899 }
9c0631a7
AH
1900 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1901 SIMD register, which is currently broken. While we get GCC
1902 to emit proper RTL for these cases, let's dump to memory. */
1903 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1904 && REG_P (src))
9c0631a7
AH
1905 {
1906 int slen = GET_MODE_SIZE (GET_MODE (src));
1907 rtx mem;
1908
1909 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1910 emit_move_insn (mem, src);
1911 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1912 }
d3a16cbd
FJ
1913 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1914 && XVECLEN (dst, 0) > 1)
1915 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
f3ce87a9 1916 else if (CONSTANT_P (src)
f8cfc6aa 1917 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1918 tmps[i] = src;
fffa9c1d 1919 else
19caa751
RK
1920 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1921 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1922 mode, mode, ssize);
fffa9c1d 1923
6e985040 1924 if (shift)
09b52670
RS
1925 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1926 build_int_2 (shift, 0), tmps[i], 0);
fffa9c1d 1927 }
19caa751 1928
3a94c984 1929 emit_queue ();
aac5cc16
RH
1930
1931 /* Copy the extracted pieces into the proper (probable) hard regs. */
1932 for (i = start; i < XVECLEN (dst, 0); i++)
1933 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1934}
1935
084a1106
JDA
1936/* Emit code to move a block SRC to block DST, where SRC and DST are
1937 non-consecutive groups of registers, each represented by a PARALLEL. */
1938
1939void
502b8322 1940emit_group_move (rtx dst, rtx src)
084a1106
JDA
1941{
1942 int i;
1943
1944 if (GET_CODE (src) != PARALLEL
1945 || GET_CODE (dst) != PARALLEL
1946 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1947 abort ();
1948
1949 /* Skip first entry if NULL. */
1950 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1951 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1952 XEXP (XVECEXP (src, 0, i), 0));
1953}
1954
6e985040
AM
1955/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1956 where SRC is non-consecutive registers represented by a PARALLEL.
1957 SSIZE represents the total size of block ORIG_DST, or -1 if not
1958 known. */
fffa9c1d
JW
1959
1960void
6e985040 1961emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1962{
aac5cc16
RH
1963 rtx *tmps, dst;
1964 int start, i;
fffa9c1d 1965
aac5cc16 1966 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1967 abort ();
1968
1969 /* Check for a NULL entry, used to indicate that the parameter goes
1970 both on the stack and in registers. */
aac5cc16
RH
1971 if (XEXP (XVECEXP (src, 0, 0), 0))
1972 start = 0;
fffa9c1d 1973 else
aac5cc16
RH
1974 start = 1;
1975
703ad42b 1976 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1977
aac5cc16
RH
1978 /* Copy the (probable) hard regs into pseudos. */
1979 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1980 {
aac5cc16
RH
1981 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1982 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1983 emit_move_insn (tmps[i], reg);
1984 }
3a94c984 1985 emit_queue ();
fffa9c1d 1986
aac5cc16
RH
1987 /* If we won't be storing directly into memory, protect the real destination
1988 from strange tricks we might play. */
1989 dst = orig_dst;
10a9f2be
JW
1990 if (GET_CODE (dst) == PARALLEL)
1991 {
1992 rtx temp;
1993
1994 /* We can get a PARALLEL dst if there is a conditional expression in
1995 a return statement. In that case, the dst and src are the same,
1996 so no action is necessary. */
1997 if (rtx_equal_p (dst, src))
1998 return;
1999
2000 /* It is unclear if we can ever reach here, but we may as well handle
2001 it. Allocate a temporary, and split this into a store/load to/from
2002 the temporary. */
2003
2004 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2005 emit_group_store (temp, src, type, ssize);
2006 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2007 return;
2008 }
3c0cb5de 2009 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2010 {
2011 dst = gen_reg_rtx (GET_MODE (orig_dst));
2012 /* Make life a bit easier for combine. */
8ae91fc0 2013 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2014 }
aac5cc16
RH
2015
2016 /* Process the pieces. */
2017 for (i = start; i < XVECLEN (src, 0); i++)
2018 {
770ae6cc 2019 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2020 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2021 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2022 rtx dest = dst;
aac5cc16
RH
2023
2024 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2025 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2026 {
6e985040
AM
2027 /* store_bit_field always takes its value from the lsb.
2028 Move the fragment to the lsb if it's not already there. */
2029 if (
2030#ifdef BLOCK_REG_PADDING
2031 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2032 == (BYTES_BIG_ENDIAN ? upward : downward)
2033#else
2034 BYTES_BIG_ENDIAN
2035#endif
2036 )
aac5cc16
RH
2037 {
2038 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
09b52670
RS
2039 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2040 build_int_2 (shift, 0), tmps[i], 0);
aac5cc16
RH
2041 }
2042 bytelen = ssize - bytepos;
71bc0330 2043 }
fffa9c1d 2044
6ddae612
JJ
2045 if (GET_CODE (dst) == CONCAT)
2046 {
2047 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2048 dest = XEXP (dst, 0);
2049 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2050 {
2051 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2052 dest = XEXP (dst, 1);
2053 }
0d446150
JH
2054 else if (bytepos == 0 && XVECLEN (src, 0))
2055 {
2056 dest = assign_stack_temp (GET_MODE (dest),
2057 GET_MODE_SIZE (GET_MODE (dest)), 0);
2058 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2059 tmps[i]);
2060 dst = dest;
2061 break;
2062 }
6ddae612
JJ
2063 else
2064 abort ();
2065 }
2066
aac5cc16 2067 /* Optimize the access just a bit. */
3c0cb5de 2068 if (MEM_P (dest)
6e985040
AM
2069 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2070 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2071 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2072 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2073 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2074 else
6ddae612 2075 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2076 mode, tmps[i], ssize);
fffa9c1d 2077 }
729a2125 2078
3a94c984 2079 emit_queue ();
aac5cc16
RH
2080
2081 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2082 if (orig_dst != dst)
aac5cc16 2083 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2084}
2085
c36fce9a
GRK
2086/* Generate code to copy a BLKmode object of TYPE out of a
2087 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2088 is null, a stack temporary is created. TGTBLK is returned.
2089
c988af2b
RS
2090 The purpose of this routine is to handle functions that return
2091 BLKmode structures in registers. Some machines (the PA for example)
2092 want to return all small structures in registers regardless of the
2093 structure's alignment. */
c36fce9a
GRK
2094
2095rtx
502b8322 2096copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2097{
19caa751
RK
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2102
2103 if (tgtblk == 0)
2104 {
1da68f56
RK
2105 tgtblk = assign_temp (build_qualified_type (type,
2106 (TYPE_QUALS (type)
2107 | TYPE_QUAL_CONST)),
2108 0, 1, 1);
19caa751
RK
2109 preserve_temp_slots (tgtblk);
2110 }
3a94c984 2111
1ed1b4fb 2112 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2113 into a new pseudo which is a full word. */
0d7839da 2114
19caa751
RK
2115 if (GET_MODE (srcreg) != BLKmode
2116 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2117 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2118
c988af2b
RS
2119 /* If the structure doesn't take up a whole number of words, see whether
2120 SRCREG is padded on the left or on the right. If it's on the left,
2121 set PADDING_CORRECTION to the number of bits to skip.
2122
2123 In most ABIs, the structure will be returned at the least end of
2124 the register, which translates to right padding on little-endian
2125 targets and left padding on big-endian targets. The opposite
2126 holds if the structure is returned at the most significant
2127 end of the register. */
2128 if (bytes % UNITS_PER_WORD != 0
2129 && (targetm.calls.return_in_msb (type)
2130 ? !BYTES_BIG_ENDIAN
2131 : BYTES_BIG_ENDIAN))
2132 padding_correction
19caa751
RK
2133 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2134
2135 /* Copy the structure BITSIZE bites at a time.
3a94c984 2136
19caa751
RK
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current
2139 time. */
c988af2b 2140 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2141 bitpos < bytes * BITS_PER_UNIT;
2142 bitpos += bitsize, xbitpos += bitsize)
2143 {
3a94c984 2144 /* We need a new source operand each time xbitpos is on a
c988af2b 2145 word boundary and when xbitpos == padding_correction
19caa751
RK
2146 (the first time through). */
2147 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2148 || xbitpos == padding_correction)
b47f8cfc
JH
2149 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2150 GET_MODE (srcreg));
19caa751
RK
2151
2152 /* We need a new destination operand each time bitpos is on
2153 a word boundary. */
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2156
19caa751
RK
2157 /* Use xbitpos for the source extraction (right justified) and
2158 xbitpos for the destination store (left justified). */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode, word_mode,
04050c69
RK
2163 BITS_PER_WORD),
2164 BITS_PER_WORD);
19caa751
RK
2165 }
2166
2167 return tgtblk;
c36fce9a
GRK
2168}
2169
94b25f81
RK
2170/* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2172
2173void
502b8322 2174use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2175{
f8cfc6aa 2176 if (!REG_P (reg)
0304dfbb 2177 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2178 abort ();
b3f8cf4a
RK
2179
2180 *call_fusage
38a448ca
RH
2181 = gen_rtx_EXPR_LIST (VOIDmode,
2182 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2183}
2184
94b25f81
RK
2185/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2186 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2187
2188void
502b8322 2189use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2190{
0304dfbb 2191 int i;
bbf6f052 2192
0304dfbb
DE
2193 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2194 abort ();
2195
2196 for (i = 0; i < nregs; i++)
e50126e8 2197 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2198}
fffa9c1d
JW
2199
2200/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2201 PARALLEL REGS. This is for calls that pass values in multiple
2202 non-contiguous locations. The Irix 6 ABI has examples of this. */
2203
2204void
502b8322 2205use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2206{
2207 int i;
2208
6bd35f86
DE
2209 for (i = 0; i < XVECLEN (regs, 0); i++)
2210 {
2211 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2212
6bd35f86
DE
2213 /* A NULL entry means the parameter goes both on the stack and in
2214 registers. This can also be a MEM for targets that pass values
2215 partially on the stack and partially in registers. */
f8cfc6aa 2216 if (reg != 0 && REG_P (reg))
6bd35f86
DE
2217 use_reg (call_fusage, reg);
2218 }
fffa9c1d 2219}
bbf6f052 2220\f
57814e5e 2221
cf5124f6
RS
2222/* Determine whether the LEN bytes generated by CONSTFUN can be
2223 stored to memory using several move instructions. CONSTFUNDATA is
2224 a pointer which will be passed as argument in every CONSTFUN call.
2225 ALIGN is maximum alignment we can assume. Return nonzero if a
2226 call to store_by_pieces should succeed. */
2227
57814e5e 2228int
502b8322
AJ
2229can_store_by_pieces (unsigned HOST_WIDE_INT len,
2230 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2231 void *constfundata, unsigned int align)
57814e5e 2232{
98166639 2233 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2234 HOST_WIDE_INT offset = 0;
2235 enum machine_mode mode, tmode;
2236 enum insn_code icode;
2237 int reverse;
2238 rtx cst;
2239
2c430630
RS
2240 if (len == 0)
2241 return 1;
2242
4977bab6 2243 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2244 return 0;
2245
2246 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2247 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2248 align = MOVE_MAX * BITS_PER_UNIT;
2249
2250 /* We would first store what we can in the largest integer mode, then go to
2251 successively smaller modes. */
2252
2253 for (reverse = 0;
2254 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2255 reverse++)
2256 {
2257 l = len;
2258 mode = VOIDmode;
cf5124f6 2259 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2260 while (max_size > 1)
2261 {
2262 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2263 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2264 if (GET_MODE_SIZE (tmode) < max_size)
2265 mode = tmode;
2266
2267 if (mode == VOIDmode)
2268 break;
2269
2270 icode = mov_optab->handlers[(int) mode].insn_code;
2271 if (icode != CODE_FOR_nothing
2272 && align >= GET_MODE_ALIGNMENT (mode))
2273 {
2274 unsigned int size = GET_MODE_SIZE (mode);
2275
2276 while (l >= size)
2277 {
2278 if (reverse)
2279 offset -= size;
2280
2281 cst = (*constfun) (constfundata, offset, mode);
2282 if (!LEGITIMATE_CONSTANT_P (cst))
2283 return 0;
2284
2285 if (!reverse)
2286 offset += size;
2287
2288 l -= size;
2289 }
2290 }
2291
2292 max_size = GET_MODE_SIZE (mode);
2293 }
2294
2295 /* The code above should have handled everything. */
2296 if (l != 0)
2297 abort ();
2298 }
2299
2300 return 1;
2301}
2302
2303/* Generate several move instructions to store LEN bytes generated by
2304 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2305 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2306 ALIGN is maximum alignment we can assume.
2307 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2308 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2309 stpcpy. */
57814e5e 2310
8fd3cf4e 2311rtx
502b8322
AJ
2312store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2313 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2314 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2315{
2316 struct store_by_pieces data;
2317
2c430630
RS
2318 if (len == 0)
2319 {
2320 if (endp == 2)
2321 abort ();
2322 return to;
2323 }
2324
4977bab6 2325 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2326 abort ();
2327 to = protect_from_queue (to, 1);
2328 data.constfun = constfun;
2329 data.constfundata = constfundata;
2330 data.len = len;
2331 data.to = to;
2332 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2333 if (endp)
2334 {
2335 rtx to1;
2336
2337 if (data.reverse)
2338 abort ();
2339 if (data.autinc_to)
2340 {
2341 if (endp == 2)
2342 {
2343 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2344 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2345 else
2346 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2347 -1));
2348 }
2349 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2350 data.offset);
2351 }
2352 else
2353 {
2354 if (endp == 2)
2355 --data.offset;
2356 to1 = adjust_address (data.to, QImode, data.offset);
2357 }
2358 return to1;
2359 }
2360 else
2361 return data.to;
57814e5e
JJ
2362}
2363
19caa751
RK
2364/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2365 rtx with BLKmode). The caller must pass TO through protect_from_queue
2366 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2367
2368static void
342e2b74 2369clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2370{
57814e5e
JJ
2371 struct store_by_pieces data;
2372
2c430630
RS
2373 if (len == 0)
2374 return;
2375
57814e5e 2376 data.constfun = clear_by_pieces_1;
df4ae160 2377 data.constfundata = NULL;
57814e5e
JJ
2378 data.len = len;
2379 data.to = to;
2380 store_by_pieces_1 (&data, align);
2381}
2382
2383/* Callback routine for clear_by_pieces.
2384 Return const0_rtx unconditionally. */
2385
2386static rtx
502b8322
AJ
2387clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2388 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2389 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2390{
2391 return const0_rtx;
2392}
2393
2394/* Subroutine of clear_by_pieces and store_by_pieces.
2395 Generate several move instructions to store LEN bytes of block TO. (A MEM
2396 rtx with BLKmode). The caller must pass TO through protect_from_queue
2397 before calling. ALIGN is maximum alignment we can assume. */
2398
2399static void
502b8322
AJ
2400store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2401 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2402{
2403 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2404 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2405 enum machine_mode mode = VOIDmode, tmode;
2406 enum insn_code icode;
9de08200 2407
57814e5e
JJ
2408 data->offset = 0;
2409 data->to_addr = to_addr;
2410 data->autinc_to
9de08200
RK
2411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2413
57814e5e
JJ
2414 data->explicit_inc_to = 0;
2415 data->reverse
9de08200 2416 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2417 if (data->reverse)
2418 data->offset = data->len;
9de08200 2419
57814e5e 2420 /* If storing requires more than two move insns,
9de08200
RK
2421 copy addresses to registers (to make displacements shorter)
2422 and use post-increment if available. */
57814e5e
JJ
2423 if (!data->autinc_to
2424 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2425 {
3a94c984 2426 /* Determine the main mode we'll be using. */
fbe1758d
AM
2427 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2428 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2429 if (GET_MODE_SIZE (tmode) < max_size)
2430 mode = tmode;
2431
57814e5e 2432 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2433 {
57814e5e
JJ
2434 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = -1;
9de08200 2437 }
3bdf5ad1 2438
57814e5e
JJ
2439 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2440 && ! data->autinc_to)
9de08200 2441 {
57814e5e
JJ
2442 data->to_addr = copy_addr_to_reg (to_addr);
2443 data->autinc_to = 1;
2444 data->explicit_inc_to = 1;
9de08200 2445 }
3bdf5ad1 2446
57814e5e
JJ
2447 if ( !data->autinc_to && CONSTANT_P (to_addr))
2448 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2449 }
2450
e1565e65 2451 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2452 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2453 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2454
57814e5e 2455 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2456 successively smaller modes. */
2457
2458 while (max_size > 1)
2459 {
9de08200
RK
2460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2462 if (GET_MODE_SIZE (tmode) < max_size)
2463 mode = tmode;
2464
2465 if (mode == VOIDmode)
2466 break;
2467
2468 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2469 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2470 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2471
2472 max_size = GET_MODE_SIZE (mode);
2473 }
2474
2475 /* The code above should have handled everything. */
57814e5e 2476 if (data->len != 0)
9de08200
RK
2477 abort ();
2478}
2479
57814e5e 2480/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2481 with move instructions for mode MODE. GENFUN is the gen_... function
2482 to make a move insn for that mode. DATA has all the other info. */
2483
2484static void
502b8322
AJ
2485store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2486 struct store_by_pieces *data)
9de08200 2487{
3bdf5ad1 2488 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2489 rtx to1, cst;
9de08200
RK
2490
2491 while (data->len >= size)
2492 {
3bdf5ad1
RK
2493 if (data->reverse)
2494 data->offset -= size;
9de08200 2495
3bdf5ad1 2496 if (data->autinc_to)
630036c6
JJ
2497 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2498 data->offset);
3a94c984 2499 else
f4ef873c 2500 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2501
940da324 2502 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2503 emit_insn (gen_add2_insn (data->to_addr,
2504 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2505
57814e5e
JJ
2506 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2507 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2508
940da324 2509 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2510 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2511
3bdf5ad1
RK
2512 if (! data->reverse)
2513 data->offset += size;
9de08200
RK
2514
2515 data->len -= size;
2516 }
2517}
2518\f
19caa751 2519/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2520 its length in bytes. */
e9a25f70
JL
2521
2522rtx
502b8322 2523clear_storage (rtx object, rtx size)
bbf6f052 2524{
e9a25f70 2525 rtx retval = 0;
3c0cb5de 2526 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
8ac61af7 2527 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2528
fcf1b822
RK
2529 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2530 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2531 if (GET_MODE (object) != BLKmode
fcf1b822 2532 && GET_CODE (size) == CONST_INT
4ca79136 2533 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2534 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2535 else
bbf6f052 2536 {
9de08200
RK
2537 object = protect_from_queue (object, 1);
2538 size = protect_from_queue (size, 0);
2539
6972c506 2540 if (size == const0_rtx)
2c430630
RS
2541 ;
2542 else if (GET_CODE (size) == CONST_INT
78762e3b 2543 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2544 clear_by_pieces (object, INTVAL (size), align);
70128ad9 2545 else if (clear_storage_via_clrmem (object, size, align))
4ca79136 2546 ;
9de08200 2547 else
4ca79136
RH
2548 retval = clear_storage_via_libcall (object, size);
2549 }
2550
2551 return retval;
2552}
2553
70128ad9 2554/* A subroutine of clear_storage. Expand a clrmem pattern;
4ca79136
RH
2555 return true if successful. */
2556
2557static bool
70128ad9 2558clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
4ca79136
RH
2559{
2560 /* Try the most limited insn first, because there's no point
2561 including more than one in the machine description unless
2562 the more limited one has some advantage. */
2563
2564 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2565 enum machine_mode mode;
2566
2567 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2568 mode = GET_MODE_WIDER_MODE (mode))
2569 {
70128ad9 2570 enum insn_code code = clrmem_optab[(int) mode];
4ca79136
RH
2571 insn_operand_predicate_fn pred;
2572
2573 if (code != CODE_FOR_nothing
2574 /* We don't need MODE to be narrower than
2575 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2576 the mode mask, as it is returned by the macro, it will
2577 definitely be less than the actual mode mask. */
2578 && ((GET_CODE (size) == CONST_INT
2579 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2580 <= (GET_MODE_MASK (mode) >> 1)))
2581 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2582 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2583 || (*pred) (object, BLKmode))
2584 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2585 || (*pred) (opalign, VOIDmode)))
9de08200 2586 {
4ca79136
RH
2587 rtx op1;
2588 rtx last = get_last_insn ();
2589 rtx pat;
9de08200 2590
4ca79136
RH
2591 op1 = convert_to_mode (mode, size, 1);
2592 pred = insn_data[(int) code].operand[1].predicate;
2593 if (pred != 0 && ! (*pred) (op1, mode))
2594 op1 = copy_to_mode_reg (mode, op1);
9de08200 2595
4ca79136
RH
2596 pat = GEN_FCN ((int) code) (object, op1, opalign);
2597 if (pat)
9de08200 2598 {
4ca79136
RH
2599 emit_insn (pat);
2600 return true;
2601 }
2602 else
2603 delete_insns_since (last);
2604 }
2605 }
9de08200 2606
4ca79136
RH
2607 return false;
2608}
9de08200 2609
8f99553f 2610/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2611 Return the return value of memset, 0 otherwise. */
9de08200 2612
4ca79136 2613static rtx
502b8322 2614clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2615{
2616 tree call_expr, arg_list, fn, object_tree, size_tree;
2617 enum machine_mode size_mode;
2618 rtx retval;
9de08200 2619
4ca79136 2620 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2621
4ca79136
RH
2622 It is unsafe to save the value generated by protect_from_queue
2623 and reuse it later. Consider what happens if emit_queue is
2624 called before the return value from protect_from_queue is used.
52cf7115 2625
4ca79136
RH
2626 Expansion of the CALL_EXPR below will call emit_queue before
2627 we are finished emitting RTL for argument setup. So if we are
2628 not careful we could get the wrong value for an argument.
52cf7115 2629
4ca79136 2630 To avoid this problem we go ahead and emit code to copy OBJECT
4dfa0342 2631 and SIZE into new pseudos.
52cf7115 2632
4ca79136
RH
2633 Note this is not strictly needed for library calls since they
2634 do not call emit_queue before loading their arguments. However,
2635 we may need to have library calls call emit_queue in the future
2636 since failing to do so could cause problems for targets which
2637 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2638
4ca79136 2639 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2640
8f99553f 2641 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2642 size = convert_to_mode (size_mode, size, 1);
2643 size = copy_to_mode_reg (size_mode, size);
52cf7115 2644
4ca79136
RH
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context. This could be a user call to memset and
2647 the user may wish to examine the return value from memset. For
2648 targets where libcalls and normal calls have different conventions
8f99553f 2649 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2650
4ca79136 2651 object_tree = make_tree (ptr_type_node, object);
8f99553f 2652 size_tree = make_tree (sizetype, size);
4ca79136
RH
2653
2654 fn = clear_storage_libcall_fn (true);
2655 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2656 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2657 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2658
2659 /* Now we have to build up the CALL_EXPR itself. */
2660 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2661 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2662 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2663
2664 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2665
2666 /* If we are initializing a readonly value, show the above call
2667 clobbered it. Otherwise, a load from it may erroneously be
2668 hoisted from a loop. */
2669 if (RTX_UNCHANGING_P (object))
2670 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2671
8f99553f 2672 return retval;
4ca79136
RH
2673}
2674
2675/* A subroutine of clear_storage_via_libcall. Create the tree node
2676 for the function we use for block clears. The first time FOR_CALL
2677 is true, we call assemble_external. */
2678
2679static GTY(()) tree block_clear_fn;
66c60e67 2680
9661b15f 2681void
502b8322 2682init_block_clear_fn (const char *asmspec)
4ca79136 2683{
9661b15f 2684 if (!block_clear_fn)
4ca79136 2685 {
9661b15f
JJ
2686 tree fn, args;
2687
8f99553f
JM
2688 fn = get_identifier ("memset");
2689 args = build_function_type_list (ptr_type_node, ptr_type_node,
2690 integer_type_node, sizetype,
2691 NULL_TREE);
4ca79136
RH
2692
2693 fn = build_decl (FUNCTION_DECL, fn, args);
2694 DECL_EXTERNAL (fn) = 1;
2695 TREE_PUBLIC (fn) = 1;
2696 DECL_ARTIFICIAL (fn) = 1;
2697 TREE_NOTHROW (fn) = 1;
2698
2699 block_clear_fn = fn;
bbf6f052 2700 }
e9a25f70 2701
9661b15f
JJ
2702 if (asmspec)
2703 {
2704 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2705 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2706 }
2707}
2708
2709static tree
502b8322 2710clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2711{
2712 static bool emitted_extern;
2713
2714 if (!block_clear_fn)
2715 init_block_clear_fn (NULL);
2716
4ca79136
RH
2717 if (for_call && !emitted_extern)
2718 {
2719 emitted_extern = true;
9661b15f
JJ
2720 make_decl_rtl (block_clear_fn, NULL);
2721 assemble_external (block_clear_fn);
4ca79136 2722 }
bbf6f052 2723
9661b15f 2724 return block_clear_fn;
4ca79136
RH
2725}
2726\f
bbf6f052
RK
2727/* Generate code to copy Y into X.
2728 Both Y and X must have the same mode, except that
2729 Y can be a constant with VOIDmode.
2730 This mode cannot be BLKmode; use emit_block_move for that.
2731
2732 Return the last instruction emitted. */
2733
2734rtx
502b8322 2735emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2736{
2737 enum machine_mode mode = GET_MODE (x);
de1b33dd 2738 rtx y_cst = NULL_RTX;
0c19a26f 2739 rtx last_insn, set;
bbf6f052
RK
2740
2741 x = protect_from_queue (x, 1);
2742 y = protect_from_queue (y, 0);
2743
2744 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2745 abort ();
2746
6de9cd9a 2747 if (CONSTANT_P (y))
de1b33dd 2748 {
51286de6 2749 if (optimize
075fc17a 2750 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2751 && (last_insn = compress_float_constant (x, y)))
2752 return last_insn;
2753
0c19a26f
RS
2754 y_cst = y;
2755
51286de6
RH
2756 if (!LEGITIMATE_CONSTANT_P (y))
2757 {
51286de6 2758 y = force_const_mem (mode, y);
3a04ff64
RH
2759
2760 /* If the target's cannot_force_const_mem prevented the spill,
2761 assume that the target's move expanders will also take care
2762 of the non-legitimate constant. */
2763 if (!y)
2764 y = y_cst;
51286de6 2765 }
de1b33dd 2766 }
bbf6f052
RK
2767
2768 /* If X or Y are memory references, verify that their addresses are valid
2769 for the machine. */
3c0cb5de 2770 if (MEM_P (x)
bbf6f052
RK
2771 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2772 && ! push_operand (x, GET_MODE (x)))
2773 || (flag_force_addr
2774 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2775 x = validize_mem (x);
bbf6f052 2776
3c0cb5de 2777 if (MEM_P (y)
bbf6f052
RK
2778 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2779 || (flag_force_addr
2780 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2781 y = validize_mem (y);
bbf6f052
RK
2782
2783 if (mode == BLKmode)
2784 abort ();
2785
de1b33dd
AO
2786 last_insn = emit_move_insn_1 (x, y);
2787
f8cfc6aa 2788 if (y_cst && REG_P (x)
0c19a26f
RS
2789 && (set = single_set (last_insn)) != NULL_RTX
2790 && SET_DEST (set) == x
2791 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2792 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2793
2794 return last_insn;
261c4230
RS
2795}
2796
2797/* Low level part of emit_move_insn.
2798 Called just like emit_move_insn, but assumes X and Y
2799 are basically valid. */
2800
2801rtx
502b8322 2802emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2803{
2804 enum machine_mode mode = GET_MODE (x);
2805 enum machine_mode submode;
2806 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2807
dbbbbf3b 2808 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2809 abort ();
76bbe028 2810
bbf6f052
RK
2811 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2812 return
2813 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2814
89742723 2815 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2816 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2817 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2818 && (mov_optab->handlers[(int) submode].insn_code
2819 != CODE_FOR_nothing))
2820 {
2821 /* Don't split destination if it is a stack push. */
2822 int stack = push_operand (x, GET_MODE (x));
7308a047 2823
79ce92d7 2824#ifdef PUSH_ROUNDING
0e9cbd11
KH
2825 /* In case we output to the stack, but the size is smaller than the
2826 machine can push exactly, we need to use move instructions. */
1a06f5fe 2827 if (stack
bb93b973
RK
2828 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2829 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2830 {
2831 rtx temp;
bb93b973 2832 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2833
2834 /* Do not use anti_adjust_stack, since we don't want to update
2835 stack_pointer_delta. */
2836 temp = expand_binop (Pmode,
2837#ifdef STACK_GROWS_DOWNWARD
2838 sub_optab,
2839#else
2840 add_optab,
2841#endif
2842 stack_pointer_rtx,
2843 GEN_INT
bb93b973
RK
2844 (PUSH_ROUNDING
2845 (GET_MODE_SIZE (GET_MODE (x)))),
2846 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2847
1a06f5fe
JH
2848 if (temp != stack_pointer_rtx)
2849 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2850
1a06f5fe
JH
2851#ifdef STACK_GROWS_DOWNWARD
2852 offset1 = 0;
2853 offset2 = GET_MODE_SIZE (submode);
2854#else
2855 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2856 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2857 + GET_MODE_SIZE (submode));
2858#endif
bb93b973 2859
1a06f5fe
JH
2860 emit_move_insn (change_address (x, submode,
2861 gen_rtx_PLUS (Pmode,
2862 stack_pointer_rtx,
2863 GEN_INT (offset1))),
2864 gen_realpart (submode, y));
2865 emit_move_insn (change_address (x, submode,
2866 gen_rtx_PLUS (Pmode,
2867 stack_pointer_rtx,
2868 GEN_INT (offset2))),
2869 gen_imagpart (submode, y));
2870 }
e9c0bd54 2871 else
79ce92d7 2872#endif
7308a047
RS
2873 /* If this is a stack, push the highpart first, so it
2874 will be in the argument order.
2875
2876 In that case, change_address is used only to convert
2877 the mode, not to change the address. */
e9c0bd54 2878 if (stack)
c937357e 2879 {
e33c0d66
RS
2880 /* Note that the real part always precedes the imag part in memory
2881 regardless of machine's endianness. */
c937357e 2882#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2883 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_imagpart (submode, y));
2885 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_realpart (submode, y));
c937357e 2887#else
a79b3dc7
RS
2888 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2889 gen_realpart (submode, y));
2890 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2891 gen_imagpart (submode, y));
c937357e
RS
2892#endif
2893 }
2894 else
2895 {
235ae7be
DM
2896 rtx realpart_x, realpart_y;
2897 rtx imagpart_x, imagpart_y;
2898
405f63da
MM
2899 /* If this is a complex value with each part being smaller than a
2900 word, the usual calling sequence will likely pack the pieces into
2901 a single register. Unfortunately, SUBREG of hard registers only
2902 deals in terms of words, so we have a problem converting input
2903 arguments to the CONCAT of two registers that is used elsewhere
2904 for complex values. If this is before reload, we can copy it into
2905 memory and reload. FIXME, we should see about using extract and
2906 insert on integer registers, but complex short and complex char
2907 variables should be rarely used. */
3a94c984 2908 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2909 && (reload_in_progress | reload_completed) == 0)
2910 {
bb93b973
RK
2911 int packed_dest_p
2912 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2913 int packed_src_p
2914 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2915
2916 if (packed_dest_p || packed_src_p)
2917 {
2918 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2919 ? MODE_FLOAT : MODE_INT);
2920
1da68f56
RK
2921 enum machine_mode reg_mode
2922 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2923
2924 if (reg_mode != BLKmode)
2925 {
2926 rtx mem = assign_stack_temp (reg_mode,
2927 GET_MODE_SIZE (mode), 0);
f4ef873c 2928 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2929
405f63da
MM
2930 if (packed_dest_p)
2931 {
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2933
405f63da
MM
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2936 }
2937 else
2938 {
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 2940
405f63da
MM
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
2943 }
2944 }
2945 }
2946 }
2947
235ae7be
DM
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2952
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2957 if (x != y
235ae7be
DM
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2962
a79b3dc7
RS
2963 emit_move_insn (realpart_x, realpart_y);
2964 emit_move_insn (imagpart_x, imagpart_y);
c937357e 2965 }
7308a047 2966
7a1ab50a 2967 return get_last_insn ();
7308a047
RS
2968 }
2969
a3600c71
HPN
2970 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2971 find a mode to do it in. If we have a movcc, use it. Otherwise,
2972 find the MODE_INT mode of the same width. */
2973 else if (GET_MODE_CLASS (mode) == MODE_CC
2974 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2975 {
2976 enum insn_code insn_code;
2977 enum machine_mode tmode = VOIDmode;
2978 rtx x1 = x, y1 = y;
2979
2980 if (mode != CCmode
2981 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2982 tmode = CCmode;
2983 else
2984 for (tmode = QImode; tmode != VOIDmode;
2985 tmode = GET_MODE_WIDER_MODE (tmode))
2986 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2987 break;
2988
2989 if (tmode == VOIDmode)
2990 abort ();
2991
2992 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2993 may call change_address which is not appropriate if we were
2994 called when a reload was in progress. We don't have to worry
2995 about changing the address since the size in bytes is supposed to
2996 be the same. Copy the MEM to change the mode and move any
2997 substitutions from the old MEM to the new one. */
2998
2999 if (reload_in_progress)
3000 {
3001 x = gen_lowpart_common (tmode, x1);
3c0cb5de 3002 if (x == 0 && MEM_P (x1))
a3600c71
HPN
3003 {
3004 x = adjust_address_nv (x1, tmode, 0);
3005 copy_replacements (x1, x);
3006 }
3007
3008 y = gen_lowpart_common (tmode, y1);
3c0cb5de 3009 if (y == 0 && MEM_P (y1))
a3600c71
HPN
3010 {
3011 y = adjust_address_nv (y1, tmode, 0);
3012 copy_replacements (y1, y);
3013 }
3014 }
3015 else
3016 {
3017 x = gen_lowpart (tmode, x);
3018 y = gen_lowpart (tmode, y);
3019 }
502b8322 3020
a3600c71
HPN
3021 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3022 return emit_insn (GEN_FCN (insn_code) (x, y));
3023 }
3024
5581fc91
RS
3025 /* Try using a move pattern for the corresponding integer mode. This is
3026 only safe when simplify_subreg can convert MODE constants into integer
3027 constants. At present, it can only do this reliably if the value
3028 fits within a HOST_WIDE_INT. */
3029 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3030 && (submode = int_mode_for_mode (mode)) != BLKmode
3031 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3032 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3033 (simplify_gen_subreg (submode, x, mode, 0),
3034 simplify_gen_subreg (submode, y, mode, 0)));
3035
cffa2189
R
3036 /* This will handle any multi-word or full-word mode that lacks a move_insn
3037 pattern. However, you will get better code if you define such patterns,
bbf6f052 3038 even if they must turn into multiple assembler instructions. */
cffa2189 3039 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3040 {
3041 rtx last_insn = 0;
3ef1eef4 3042 rtx seq, inner;
235ae7be 3043 int need_clobber;
bb93b973 3044 int i;
3a94c984 3045
a98c9f1a
RK
3046#ifdef PUSH_ROUNDING
3047
3048 /* If X is a push on the stack, do the push now and replace
3049 X with a reference to the stack pointer. */
3050 if (push_operand (x, GET_MODE (x)))
3051 {
918a6124
GK
3052 rtx temp;
3053 enum rtx_code code;
0fb7aeda 3054
918a6124
GK
3055 /* Do not use anti_adjust_stack, since we don't want to update
3056 stack_pointer_delta. */
3057 temp = expand_binop (Pmode,
3058#ifdef STACK_GROWS_DOWNWARD
3059 sub_optab,
3060#else
3061 add_optab,
3062#endif
3063 stack_pointer_rtx,
3064 GEN_INT
bb93b973
RK
3065 (PUSH_ROUNDING
3066 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3067 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3068
0fb7aeda
KH
3069 if (temp != stack_pointer_rtx)
3070 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3071
3072 code = GET_CODE (XEXP (x, 0));
bb93b973 3073
918a6124
GK
3074 /* Just hope that small offsets off SP are OK. */
3075 if (code == POST_INC)
0fb7aeda 3076 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3077 GEN_INT (-((HOST_WIDE_INT)
3078 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3079 else if (code == POST_DEC)
0fb7aeda 3080 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3081 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3082 else
3083 temp = stack_pointer_rtx;
3084
3085 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3086 }
3087#endif
3a94c984 3088
3ef1eef4
RK
3089 /* If we are in reload, see if either operand is a MEM whose address
3090 is scheduled for replacement. */
3c0cb5de 3091 if (reload_in_progress && MEM_P (x)
3ef1eef4 3092 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3093 x = replace_equiv_address_nv (x, inner);
3c0cb5de 3094 if (reload_in_progress && MEM_P (y)
3ef1eef4 3095 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3096 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3097
235ae7be 3098 start_sequence ();
15a7a8ec 3099
235ae7be 3100 need_clobber = 0;
bbf6f052 3101 for (i = 0;
3a94c984 3102 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3103 i++)
3104 {
3105 rtx xpart = operand_subword (x, i, 1, mode);
3106 rtx ypart = operand_subword (y, i, 1, mode);
3107
3108 /* If we can't get a part of Y, put Y into memory if it is a
3109 constant. Otherwise, force it into a register. If we still
3110 can't get a part of Y, abort. */
3111 if (ypart == 0 && CONSTANT_P (y))
3112 {
3113 y = force_const_mem (mode, y);
3114 ypart = operand_subword (y, i, 1, mode);
3115 }
3116 else if (ypart == 0)
3117 ypart = operand_subword_force (y, i, mode);
3118
3119 if (xpart == 0 || ypart == 0)
3120 abort ();
3121
235ae7be
DM
3122 need_clobber |= (GET_CODE (xpart) == SUBREG);
3123
bbf6f052
RK
3124 last_insn = emit_move_insn (xpart, ypart);
3125 }
6551fa4d 3126
2f937369 3127 seq = get_insns ();
235ae7be
DM
3128 end_sequence ();
3129
3130 /* Show the output dies here. This is necessary for SUBREGs
3131 of pseudos since we cannot track their lifetimes correctly;
3132 hard regs shouldn't appear here except as return values.
3133 We never want to emit such a clobber after reload. */
3134 if (x != y
3135 && ! (reload_in_progress || reload_completed)
3136 && need_clobber != 0)
bb93b973 3137 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3138
3139 emit_insn (seq);
3140
bbf6f052
RK
3141 return last_insn;
3142 }
3143 else
3144 abort ();
3145}
51286de6
RH
3146
3147/* If Y is representable exactly in a narrower mode, and the target can
3148 perform the extension directly from constant or memory, then emit the
3149 move as an extension. */
3150
3151static rtx
502b8322 3152compress_float_constant (rtx x, rtx y)
51286de6
RH
3153{
3154 enum machine_mode dstmode = GET_MODE (x);
3155 enum machine_mode orig_srcmode = GET_MODE (y);
3156 enum machine_mode srcmode;
3157 REAL_VALUE_TYPE r;
3158
3159 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3160
3161 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3162 srcmode != orig_srcmode;
3163 srcmode = GET_MODE_WIDER_MODE (srcmode))
3164 {
3165 enum insn_code ic;
3166 rtx trunc_y, last_insn;
3167
3168 /* Skip if the target can't extend this way. */
3169 ic = can_extend_p (dstmode, srcmode, 0);
3170 if (ic == CODE_FOR_nothing)
3171 continue;
3172
3173 /* Skip if the narrowed value isn't exact. */
3174 if (! exact_real_truncate (srcmode, &r))
3175 continue;
3176
3177 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3178
3179 if (LEGITIMATE_CONSTANT_P (trunc_y))
3180 {
3181 /* Skip if the target needs extra instructions to perform
3182 the extension. */
3183 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3184 continue;
3185 }
3186 else if (float_extend_from_mem[dstmode][srcmode])
3187 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3188 else
3189 continue;
3190
3191 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3192 last_insn = get_last_insn ();
3193
f8cfc6aa 3194 if (REG_P (x))
0c19a26f 3195 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3196
3197 return last_insn;
3198 }
3199
3200 return NULL_RTX;
3201}
bbf6f052
RK
3202\f
3203/* Pushing data onto the stack. */
3204
3205/* Push a block of length SIZE (perhaps variable)
3206 and return an rtx to address the beginning of the block.
3207 Note that it is not possible for the value returned to be a QUEUED.
3208 The value may be virtual_outgoing_args_rtx.
3209
3210 EXTRA is the number of bytes of padding to push in addition to SIZE.
3211 BELOW nonzero means this padding comes at low addresses;
3212 otherwise, the padding comes at high addresses. */
3213
3214rtx
502b8322 3215push_block (rtx size, int extra, int below)
bbf6f052 3216{
b3694847 3217 rtx temp;
88f63c77
RK
3218
3219 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3220 if (CONSTANT_P (size))
3221 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 3222 else if (REG_P (size) && extra == 0)
bbf6f052
RK
3223 anti_adjust_stack (size);
3224 else
3225 {
ce48579b 3226 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3227 if (extra != 0)
906c4e36 3228 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3229 temp, 0, OPTAB_LIB_WIDEN);
3230 anti_adjust_stack (temp);
3231 }
3232
f73ad30e 3233#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3234 if (0)
f73ad30e
JH
3235#else
3236 if (1)
bbf6f052 3237#endif
f73ad30e 3238 {
f73ad30e
JH
3239 temp = virtual_outgoing_args_rtx;
3240 if (extra != 0 && below)
3241 temp = plus_constant (temp, extra);
3242 }
3243 else
3244 {
3245 if (GET_CODE (size) == CONST_INT)
3246 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3247 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3248 else if (extra != 0 && !below)
3249 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3250 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3251 else
3252 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3253 negate_rtx (Pmode, size));
3254 }
bbf6f052
RK
3255
3256 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3257}
3258
21d93687
RK
3259#ifdef PUSH_ROUNDING
3260
566aa174 3261/* Emit single push insn. */
21d93687 3262
566aa174 3263static void
502b8322 3264emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3265{
566aa174 3266 rtx dest_addr;
918a6124 3267 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3268 rtx dest;
371b8fc0
JH
3269 enum insn_code icode;
3270 insn_operand_predicate_fn pred;
566aa174 3271
371b8fc0
JH
3272 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3273 /* If there is push pattern, use it. Otherwise try old way of throwing
3274 MEM representing push operation to move expander. */
3275 icode = push_optab->handlers[(int) mode].insn_code;
3276 if (icode != CODE_FOR_nothing)
3277 {
3278 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3279 && !((*pred) (x, mode))))
371b8fc0
JH
3280 x = force_reg (mode, x);
3281 emit_insn (GEN_FCN (icode) (x));
3282 return;
3283 }
566aa174
JH
3284 if (GET_MODE_SIZE (mode) == rounded_size)
3285 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3286 /* If we are to pad downward, adjust the stack pointer first and
3287 then store X into the stack location using an offset. This is
3288 because emit_move_insn does not know how to pad; it does not have
3289 access to type. */
3290 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3291 {
3292 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3293 HOST_WIDE_INT offset;
3294
3295 emit_move_insn (stack_pointer_rtx,
3296 expand_binop (Pmode,
3297#ifdef STACK_GROWS_DOWNWARD
3298 sub_optab,
3299#else
3300 add_optab,
3301#endif
3302 stack_pointer_rtx,
3303 GEN_INT (rounded_size),
3304 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3305
3306 offset = (HOST_WIDE_INT) padding_size;
3307#ifdef STACK_GROWS_DOWNWARD
3308 if (STACK_PUSH_CODE == POST_DEC)
3309 /* We have already decremented the stack pointer, so get the
3310 previous value. */
3311 offset += (HOST_WIDE_INT) rounded_size;
3312#else
3313 if (STACK_PUSH_CODE == POST_INC)
3314 /* We have already incremented the stack pointer, so get the
3315 previous value. */
3316 offset -= (HOST_WIDE_INT) rounded_size;
3317#endif
3318 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3319 }
566aa174
JH
3320 else
3321 {
3322#ifdef STACK_GROWS_DOWNWARD
329d586f 3323 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3324 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3325 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3326#else
329d586f 3327 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3328 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3329 GEN_INT (rounded_size));
3330#endif
3331 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3332 }
3333
3334 dest = gen_rtx_MEM (mode, dest_addr);
3335
566aa174
JH
3336 if (type != 0)
3337 {
3338 set_mem_attributes (dest, type, 1);
c3d32120
RK
3339
3340 if (flag_optimize_sibling_calls)
3341 /* Function incoming arguments may overlap with sibling call
3342 outgoing arguments and we cannot allow reordering of reads
3343 from function arguments with stores to outgoing arguments
3344 of sibling calls. */
3345 set_mem_alias_set (dest, 0);
566aa174
JH
3346 }
3347 emit_move_insn (dest, x);
566aa174 3348}
21d93687 3349#endif
566aa174 3350
bbf6f052
RK
3351/* Generate code to push X onto the stack, assuming it has mode MODE and
3352 type TYPE.
3353 MODE is redundant except when X is a CONST_INT (since they don't
3354 carry mode info).
3355 SIZE is an rtx for the size of data to be copied (in bytes),
3356 needed only if X is BLKmode.
3357
f1eaaf73 3358 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3359
cd048831
RK
3360 If PARTIAL and REG are both nonzero, then copy that many of the first
3361 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3362 The amount of space pushed is decreased by PARTIAL words,
3363 rounded *down* to a multiple of PARM_BOUNDARY.
3364 REG must be a hard register in this case.
cd048831
RK
3365 If REG is zero but PARTIAL is not, take any all others actions for an
3366 argument partially in registers, but do not actually load any
3367 registers.
bbf6f052
RK
3368
3369 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3370 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3371
3372 On a machine that lacks real push insns, ARGS_ADDR is the address of
3373 the bottom of the argument block for this call. We use indexing off there
3374 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3375 argument block has not been preallocated.
3376
e5e809f4
JL
3377 ARGS_SO_FAR is the size of args previously pushed for this call.
3378
3379 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3380 for arguments passed in registers. If nonzero, it will be the number
3381 of bytes required. */
bbf6f052
RK
3382
3383void
502b8322
AJ
3384emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3385 unsigned int align, int partial, rtx reg, int extra,
3386 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3387 rtx alignment_pad)
bbf6f052
RK
3388{
3389 rtx xinner;
3390 enum direction stack_direction
3391#ifdef STACK_GROWS_DOWNWARD
3392 = downward;
3393#else
3394 = upward;
3395#endif
3396
3397 /* Decide where to pad the argument: `downward' for below,
3398 `upward' for above, or `none' for don't pad it.
3399 Default is below for small data on big-endian machines; else above. */
3400 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3401
0fb7aeda 3402 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3403 FIXME: why? */
3404 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3405 if (where_pad != none)
3406 where_pad = (where_pad == downward ? upward : downward);
3407
3408 xinner = x = protect_from_queue (x, 0);
3409
3410 if (mode == BLKmode)
3411 {
3412 /* Copy a block into the stack, entirely or partially. */
3413
b3694847 3414 rtx temp;
bbf6f052 3415 int used = partial * UNITS_PER_WORD;
531547e9 3416 int offset;
bbf6f052 3417 int skip;
3a94c984 3418
531547e9
FJ
3419 if (reg && GET_CODE (reg) == PARALLEL)
3420 {
3421 /* Use the size of the elt to compute offset. */
3422 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3423 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3424 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3425 }
3426 else
3427 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3428
bbf6f052
RK
3429 if (size == 0)
3430 abort ();
3431
3432 used -= offset;
3433
3434 /* USED is now the # of bytes we need not copy to the stack
3435 because registers will take care of them. */
3436
3437 if (partial != 0)
f4ef873c 3438 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3439
3440 /* If the partial register-part of the arg counts in its stack size,
3441 skip the part of stack space corresponding to the registers.
3442 Otherwise, start copying to the beginning of the stack space,
3443 by setting SKIP to 0. */
e5e809f4 3444 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3445
3446#ifdef PUSH_ROUNDING
3447 /* Do it with several push insns if that doesn't take lots of insns
3448 and if there is no difficulty with push insns that skip bytes
3449 on the stack for alignment purposes. */
3450 if (args_addr == 0
f73ad30e 3451 && PUSH_ARGS
bbf6f052
RK
3452 && GET_CODE (size) == CONST_INT
3453 && skip == 0
f26aca6d 3454 && MEM_ALIGN (xinner) >= align
15914757 3455 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3456 /* Here we avoid the case of a structure whose weak alignment
3457 forces many pushes of a small amount of data,
3458 and such small pushes do rounding that causes trouble. */
e1565e65 3459 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3460 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3461 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3462 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3463 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3464 {
3465 /* Push padding now if padding above and stack grows down,
3466 or if padding below and stack grows up.
3467 But if space already allocated, this has already been done. */
3468 if (extra && args_addr == 0
3469 && where_pad != none && where_pad != stack_direction)
906c4e36 3470 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3471
8fd3cf4e 3472 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3473 }
3474 else
3a94c984 3475#endif /* PUSH_ROUNDING */
bbf6f052 3476 {
7ab923cc
JJ
3477 rtx target;
3478
bbf6f052
RK
3479 /* Otherwise make space on the stack and copy the data
3480 to the address of that space. */
3481
3482 /* Deduct words put into registers from the size we must copy. */
3483 if (partial != 0)
3484 {
3485 if (GET_CODE (size) == CONST_INT)
906c4e36 3486 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3487 else
3488 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3489 GEN_INT (used), NULL_RTX, 0,
3490 OPTAB_LIB_WIDEN);
bbf6f052
RK
3491 }
3492
3493 /* Get the address of the stack space.
3494 In this case, we do not deal with EXTRA separately.
3495 A single stack adjust will do. */
3496 if (! args_addr)
3497 {
3498 temp = push_block (size, extra, where_pad == downward);
3499 extra = 0;
3500 }
3501 else if (GET_CODE (args_so_far) == CONST_INT)
3502 temp = memory_address (BLKmode,
3503 plus_constant (args_addr,
3504 skip + INTVAL (args_so_far)));
3505 else
3506 temp = memory_address (BLKmode,
38a448ca
RH
3507 plus_constant (gen_rtx_PLUS (Pmode,
3508 args_addr,
3509 args_so_far),
bbf6f052 3510 skip));
4ca79136
RH
3511
3512 if (!ACCUMULATE_OUTGOING_ARGS)
3513 {
3514 /* If the source is referenced relative to the stack pointer,
3515 copy it to another register to stabilize it. We do not need
3516 to do this if we know that we won't be changing sp. */
3517
3518 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3519 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3520 temp = copy_to_reg (temp);
3521 }
3522
3a94c984 3523 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3524
3a94c984
KH
3525 if (type != 0)
3526 {
3527 set_mem_attributes (target, type, 1);
3528 /* Function incoming arguments may overlap with sibling call
3529 outgoing arguments and we cannot allow reordering of reads
3530 from function arguments with stores to outgoing arguments
3531 of sibling calls. */
ba4828e0 3532 set_mem_alias_set (target, 0);
3a94c984 3533 }
4ca79136 3534
44bb111a
RH
3535 /* ALIGN may well be better aligned than TYPE, e.g. due to
3536 PARM_BOUNDARY. Assume the caller isn't lying. */
3537 set_mem_align (target, align);
4ca79136 3538
44bb111a 3539 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3540 }
3541 }
3542 else if (partial > 0)
3543 {
3544 /* Scalar partly in registers. */
3545
3546 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3547 int i;
3548 int not_stack;
3549 /* # words of start of argument
3550 that we must make space for but need not store. */
3551 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3552 int args_offset = INTVAL (args_so_far);
3553 int skip;
3554
3555 /* Push padding now if padding above and stack grows down,
3556 or if padding below and stack grows up.
3557 But if space already allocated, this has already been done. */
3558 if (extra && args_addr == 0
3559 && where_pad != none && where_pad != stack_direction)
906c4e36 3560 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3561
3562 /* If we make space by pushing it, we might as well push
3563 the real data. Otherwise, we can leave OFFSET nonzero
3564 and leave the space uninitialized. */
3565 if (args_addr == 0)
3566 offset = 0;
3567
3568 /* Now NOT_STACK gets the number of words that we don't need to
3569 allocate on the stack. */
3570 not_stack = partial - offset;
3571
3572 /* If the partial register-part of the arg counts in its stack size,
3573 skip the part of stack space corresponding to the registers.
3574 Otherwise, start copying to the beginning of the stack space,
3575 by setting SKIP to 0. */
e5e809f4 3576 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3577
3578 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3579 x = validize_mem (force_const_mem (mode, x));
3580
3581 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3582 SUBREGs of such registers are not allowed. */
f8cfc6aa 3583 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3584 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3585 x = copy_to_reg (x);
3586
3587 /* Loop over all the words allocated on the stack for this arg. */
3588 /* We can do it by words, because any scalar bigger than a word
3589 has a size a multiple of a word. */
3590#ifndef PUSH_ARGS_REVERSED
3591 for (i = not_stack; i < size; i++)
3592#else
3593 for (i = size - 1; i >= not_stack; i--)
3594#endif
3595 if (i >= not_stack + offset)
3596 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3597 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3598 0, args_addr,
3599 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3600 * UNITS_PER_WORD)),
4fc026cd 3601 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3602 }
3603 else
3604 {
3605 rtx addr;
3bdf5ad1 3606 rtx dest;
bbf6f052
RK
3607
3608 /* Push padding now if padding above and stack grows down,
3609 or if padding below and stack grows up.
3610 But if space already allocated, this has already been done. */
3611 if (extra && args_addr == 0
3612 && where_pad != none && where_pad != stack_direction)
906c4e36 3613 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3614
3615#ifdef PUSH_ROUNDING
f73ad30e 3616 if (args_addr == 0 && PUSH_ARGS)
566aa174 3617 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3618 else
3619#endif
921b3427
RK
3620 {
3621 if (GET_CODE (args_so_far) == CONST_INT)
3622 addr
3623 = memory_address (mode,
3a94c984 3624 plus_constant (args_addr,
921b3427 3625 INTVAL (args_so_far)));
3a94c984 3626 else
38a448ca
RH
3627 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3628 args_so_far));
566aa174
JH
3629 dest = gen_rtx_MEM (mode, addr);
3630 if (type != 0)
3631 {
3632 set_mem_attributes (dest, type, 1);
3633 /* Function incoming arguments may overlap with sibling call
3634 outgoing arguments and we cannot allow reordering of reads
3635 from function arguments with stores to outgoing arguments
3636 of sibling calls. */
ba4828e0 3637 set_mem_alias_set (dest, 0);
566aa174 3638 }
bbf6f052 3639
566aa174 3640 emit_move_insn (dest, x);
566aa174 3641 }
bbf6f052
RK
3642 }
3643
bbf6f052
RK
3644 /* If part should go in registers, copy that part
3645 into the appropriate registers. Do this now, at the end,
3646 since mem-to-mem copies above may do function calls. */
cd048831 3647 if (partial > 0 && reg != 0)
fffa9c1d
JW
3648 {
3649 /* Handle calls that pass values in multiple non-contiguous locations.
3650 The Irix 6 ABI has examples of this. */
3651 if (GET_CODE (reg) == PARALLEL)
6e985040 3652 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3653 else
3654 move_block_to_reg (REGNO (reg), x, partial, mode);
3655 }
bbf6f052
RK
3656
3657 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3658 anti_adjust_stack (GEN_INT (extra));
3a94c984 3659
3ea2292a 3660 if (alignment_pad && args_addr == 0)
4fc026cd 3661 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3662}
3663\f
296b4ed9
RK
3664/* Return X if X can be used as a subtarget in a sequence of arithmetic
3665 operations. */
3666
3667static rtx
502b8322 3668get_subtarget (rtx x)
296b4ed9
RK
3669{
3670 return ((x == 0
3671 /* Only registers can be subtargets. */
f8cfc6aa 3672 || !REG_P (x)
296b4ed9
RK
3673 /* If the register is readonly, it can't be set more than once. */
3674 || RTX_UNCHANGING_P (x)
3675 /* Don't use hard regs to avoid extending their life. */
3676 || REGNO (x) < FIRST_PSEUDO_REGISTER
3677 /* Avoid subtargets inside loops,
3678 since they hide some invariant expressions. */
3679 || preserve_subexpressions_p ())
3680 ? 0 : x);
3681}
3682
bbf6f052
RK
3683/* Expand an assignment that stores the value of FROM into TO.
3684 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3685 (This may contain a QUEUED rtx;
3686 if the value is constant, this rtx is a constant.)
b90f141a 3687 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3688
3689rtx
b90f141a 3690expand_assignment (tree to, tree from, int want_value)
bbf6f052 3691{
b3694847 3692 rtx to_rtx = 0;
bbf6f052
RK
3693 rtx result;
3694
3695 /* Don't crash if the lhs of the assignment was erroneous. */
3696
3697 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3698 {
3699 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3700 return want_value ? result : NULL_RTX;
3701 }
bbf6f052
RK
3702
3703 /* Assignment of a structure component needs special treatment
3704 if the structure component's rtx is not simply a MEM.
6be58303
JW
3705 Assignment of an array element at a constant index, and assignment of
3706 an array element in an unaligned packed structure field, has the same
3707 problem. */
bbf6f052 3708
08293add 3709 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3710 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3711 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3712 {
3713 enum machine_mode mode1;
770ae6cc 3714 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3715 rtx orig_to_rtx;
7bb0943f 3716 tree offset;
bbf6f052
RK
3717 int unsignedp;
3718 int volatilep = 0;
0088fcb1
RK
3719 tree tem;
3720
3721 push_temp_slots ();
839c4796 3722 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3723 &unsignedp, &volatilep);
bbf6f052
RK
3724
3725 /* If we are going to use store_bit_field and extract_bit_field,
3726 make sure to_rtx will be safe for multiple use. */
3727
3728 if (mode1 == VOIDmode && want_value)
3729 tem = stabilize_reference (tem);
3730
1ed1b4fb
RK
3731 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3732
7bb0943f
RS
3733 if (offset != 0)
3734 {
e3c8ea67 3735 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f 3736
3c0cb5de 3737 if (!MEM_P (to_rtx))
7bb0943f 3738 abort ();
bd070e1a 3739
bd070e1a 3740#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3741 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3742 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3743#else
3744 if (GET_MODE (offset_rtx) != ptr_mode)
3745 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3746#endif
bd070e1a 3747
9a7b9f4f
JL
3748 /* A constant address in TO_RTX can have VOIDmode, we must not try
3749 to call force_reg for that case. Avoid that case. */
3c0cb5de 3750 if (MEM_P (to_rtx)
89752202 3751 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3752 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3753 && bitsize > 0
3a94c984 3754 && (bitpos % bitsize) == 0
89752202 3755 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3756 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3757 {
e3c8ea67 3758 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3759 bitpos = 0;
3760 }
3761
0d4903b8 3762 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3763 highest_pow2_factor_for_target (to,
3764 offset));
7bb0943f 3765 }
c5c76735 3766
3c0cb5de 3767 if (MEM_P (to_rtx))
998d7deb 3768 {
998d7deb
RH
3769 /* If the field is at offset zero, we could have been given the
3770 DECL_RTX of the parent struct. Don't munge it. */
3771 to_rtx = shallow_copy_rtx (to_rtx);
3772
6f1087be 3773 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3774 }
effbcc6a 3775
a06ef755
RK
3776 /* Deal with volatile and readonly fields. The former is only done
3777 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3c0cb5de 3778 if (volatilep && MEM_P (to_rtx))
a06ef755
RK
3779 {
3780 if (to_rtx == orig_to_rtx)
3781 to_rtx = copy_rtx (to_rtx);
3782 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3783 }
3784
956d6950 3785 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3786 && TREE_READONLY (TREE_OPERAND (to, 1))
3787 /* We can't assert that a MEM won't be set more than once
3788 if the component is not addressable because another
3789 non-addressable component may be referenced by the same MEM. */
3c0cb5de 3790 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
956d6950 3791 {
a06ef755 3792 if (to_rtx == orig_to_rtx)
956d6950 3793 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3795 }
3796
3c0cb5de 3797 if (MEM_P (to_rtx) && ! can_address_p (to))
a06ef755
RK
3798 {
3799 if (to_rtx == orig_to_rtx)
3800 to_rtx = copy_rtx (to_rtx);
3801 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3802 }
3803
7682ef83
JJ
3804 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3805 mode. */
3806 while (0 && mode1 == VOIDmode && !want_value
60ba25bf
JJ
3807 && bitpos + bitsize <= BITS_PER_WORD
3808 && bitsize < BITS_PER_WORD
3809 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3810 && !TREE_SIDE_EFFECTS (to)
3811 && !TREE_THIS_VOLATILE (to))
df62f18a 3812 {
60ba25bf 3813 tree src, op0, op1;
df62f18a
JJ
3814 rtx value;
3815 HOST_WIDE_INT count = bitpos;
60ba25bf
JJ
3816 optab binop;
3817
3818 src = from;
3819 STRIP_NOPS (src);
3820 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3821 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3822 break;
3823
3824 op0 = TREE_OPERAND (src, 0);
3825 op1 = TREE_OPERAND (src, 1);
3826 STRIP_NOPS (op0);
3827
3828 if (! operand_equal_p (to, op0, 0))
3829 break;
df62f18a
JJ
3830
3831 if (BYTES_BIG_ENDIAN)
3832 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3833
3834 /* Special case some bitfield op= exp. */
60ba25bf 3835 switch (TREE_CODE (src))
df62f18a
JJ
3836 {
3837 case PLUS_EXPR:
3838 case MINUS_EXPR:
3839 if (count <= 0)
3840 break;
3841
3842 /* For now, just optimize the case of the topmost bitfield
60ba25bf
JJ
3843 where we don't need to do any masking and also
3844 1 bit bitfields where xor can be used.
df62f18a
JJ
3845 We might win by one instruction for the other bitfields
3846 too if insv/extv instructions aren't used, so that
3847 can be added later. */
60ba25bf
JJ
3848 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3849 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
df62f18a 3850 break;
60ba25bf 3851 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
df62f18a
JJ
3852 value = protect_from_queue (value, 0);
3853 to_rtx = protect_from_queue (to_rtx, 1);
60ba25bf
JJ
3854 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3855 if (bitsize == 1
3856 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3857 {
3858 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3859 NULL_RTX);
3860 binop = xor_optab;
3861 }
df62f18a
JJ
3862 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3863 value, build_int_2 (count, 0),
3864 NULL_RTX, 1);
60ba25bf 3865 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
df62f18a
JJ
3866 value, to_rtx, 1, OPTAB_WIDEN);
3867 if (result != to_rtx)
3868 emit_move_insn (to_rtx, result);
3869 free_temp_slots ();
3870 pop_temp_slots ();
3871 return NULL_RTX;
3872 default:
3873 break;
3874 }
60ba25bf
JJ
3875
3876 break;
df62f18a
JJ
3877 }
3878
a06ef755
RK
3879 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3880 (want_value
3881 /* Spurious cast for HPUX compiler. */
3882 ? ((enum machine_mode)
3883 TYPE_MODE (TREE_TYPE (to)))
3884 : VOIDmode),
3885 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3886
a06ef755
RK
3887 preserve_temp_slots (result);
3888 free_temp_slots ();
3889 pop_temp_slots ();
a69beca1 3890
a06ef755
RK
3891 /* If the value is meaningful, convert RESULT to the proper mode.
3892 Otherwise, return nothing. */
3893 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3894 TYPE_MODE (TREE_TYPE (from)),
3895 result,
8df83eae 3896 TYPE_UNSIGNED (TREE_TYPE (to)))
a06ef755 3897 : NULL_RTX);
bbf6f052
RK
3898 }
3899
cd1db108
RS
3900 /* If the rhs is a function call and its value is not an aggregate,
3901 call the function before we start to compute the lhs.
3902 This is needed for correct code for cases such as
3903 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3904 requires loading up part of an address in a separate insn.
3905
1858863b
JW
3906 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3907 since it might be a promoted variable where the zero- or sign- extension
3908 needs to be done. Handling this in the normal way is safe because no
3909 computation is done before the call. */
61f71b34 3910 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 3912 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 3913 && REG_P (DECL_RTL (to))))
cd1db108 3914 {
0088fcb1
RK
3915 rtx value;
3916
3917 push_temp_slots ();
3918 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3919 if (to_rtx == 0)
37a08a29 3920 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3921
fffa9c1d
JW
3922 /* Handle calls that return values in multiple non-contiguous locations.
3923 The Irix 6 ABI has examples of this. */
3924 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3925 emit_group_load (to_rtx, value, TREE_TYPE (from),
3926 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3927 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3928 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3929 else
6419e5b0 3930 {
5ae6cd0d 3931 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3932 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3933 emit_move_insn (to_rtx, value);
3934 }
cd1db108
RS
3935 preserve_temp_slots (to_rtx);
3936 free_temp_slots ();
0088fcb1 3937 pop_temp_slots ();
709f5be1 3938 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3939 }
3940
bbf6f052
RK
3941 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3942 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3943
3944 if (to_rtx == 0)
37a08a29 3945 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3946
86d38d25 3947 /* Don't move directly into a return register. */
14a774a9 3948 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 3949 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3950 {
0088fcb1
RK
3951 rtx temp;
3952
3953 push_temp_slots ();
3954 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3955
3956 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3957 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3958 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3959 else
3960 emit_move_insn (to_rtx, temp);
3961
86d38d25
RS
3962 preserve_temp_slots (to_rtx);
3963 free_temp_slots ();
0088fcb1 3964 pop_temp_slots ();
709f5be1 3965 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3966 }
3967
bbf6f052
RK
3968 /* In case we are returning the contents of an object which overlaps
3969 the place the value is being stored, use a safe function when copying
3970 a value through a pointer into a structure value return block. */
3971 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3972 && current_function_returns_struct
3973 && !current_function_returns_pcc_struct)
3974 {
0088fcb1
RK
3975 rtx from_rtx, size;
3976
3977 push_temp_slots ();
33a20d10 3978 size = expr_size (from);
37a08a29 3979 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3980
8f99553f
JM
3981 emit_library_call (memmove_libfunc, LCT_NORMAL,
3982 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3983 XEXP (from_rtx, 0), Pmode,
3984 convert_to_mode (TYPE_MODE (sizetype),
3985 size, TYPE_UNSIGNED (sizetype)),
3986 TYPE_MODE (sizetype));
bbf6f052
RK
3987
3988 preserve_temp_slots (to_rtx);
3989 free_temp_slots ();
0088fcb1 3990 pop_temp_slots ();
709f5be1 3991 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3992 }
3993
3994 /* Compute FROM and store the value in the rtx we got. */
3995
0088fcb1 3996 push_temp_slots ();
bbf6f052
RK
3997 result = store_expr (from, to_rtx, want_value);
3998 preserve_temp_slots (result);
3999 free_temp_slots ();
0088fcb1 4000 pop_temp_slots ();
709f5be1 4001 return want_value ? result : NULL_RTX;
bbf6f052
RK
4002}
4003
4004/* Generate code for computing expression EXP,
4005 and storing the value into TARGET.
bbf6f052
RK
4006 TARGET may contain a QUEUED rtx.
4007
8403445a 4008 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
4009 not in TARGET, so that we can be sure to use the proper
4010 value in a containing expression even if TARGET has something
4011 else stored in it. If possible, we copy the value through a pseudo
4012 and return that pseudo. Or, if the value is constant, we try to
4013 return the constant. In some cases, we return a pseudo
4014 copied *from* TARGET.
4015
4016 If the mode is BLKmode then we may return TARGET itself.
4017 It turns out that in BLKmode it doesn't cause a problem.
4018 because C has no operators that could combine two different
4019 assignments into the same BLKmode object with different values
4020 with no sequence point. Will other languages need this to
4021 be more thorough?
4022
8403445a 4023 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4024 to catch quickly any cases where the caller uses the value
8403445a
AM
4025 and fails to set WANT_VALUE.
4026
4027 If WANT_VALUE & 2 is set, this is a store into a call param on the
4028 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4029
4030rtx
502b8322 4031store_expr (tree exp, rtx target, int want_value)
bbf6f052 4032{
b3694847 4033 rtx temp;
0fab64a3 4034 rtx alt_rtl = NULL_RTX;
1bbd65cd 4035 rtx mark = mark_queue ();
bbf6f052 4036 int dont_return_target = 0;
e5408e52 4037 int dont_store_target = 0;
bbf6f052 4038
847311f4
AL
4039 if (VOID_TYPE_P (TREE_TYPE (exp)))
4040 {
4041 /* C++ can generate ?: expressions with a throw expression in one
4042 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4043 store the throw expression's nonexistent result. */
847311f4
AL
4044 if (want_value)
4045 abort ();
4046 expand_expr (exp, const0_rtx, VOIDmode, 0);
4047 return NULL_RTX;
4048 }
bbf6f052
RK
4049 if (TREE_CODE (exp) == COMPOUND_EXPR)
4050 {
4051 /* Perform first part of compound expression, then assign from second
4052 part. */
8403445a
AM
4053 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4054 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4055 emit_queue ();
709f5be1 4056 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4057 }
4058 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4059 {
4060 /* For conditional expression, get safe form of the target. Then
4061 test the condition, doing the appropriate assignment on either
4062 side. This avoids the creation of unnecessary temporaries.
4063 For non-BLKmode, it is more efficient not to do this. */
4064
4065 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4066
4067 emit_queue ();
4068 target = protect_from_queue (target, 1);
4069
dabf8373 4070 do_pending_stack_adjust ();
bbf6f052
RK
4071 NO_DEFER_POP;
4072 jumpifnot (TREE_OPERAND (exp, 0), lab1);
8403445a 4073 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
bbf6f052
RK
4074 emit_queue ();
4075 emit_jump_insn (gen_jump (lab2));
4076 emit_barrier ();
4077 emit_label (lab1);
8403445a 4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
bbf6f052
RK
4079 emit_queue ();
4080 emit_label (lab2);
4081 OK_DEFER_POP;
a3a58acc 4082
8403445a 4083 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4084 }
bbf6f052 4085 else if (queued_subexp_p (target))
709f5be1
RS
4086 /* If target contains a postincrement, let's not risk
4087 using it as the place to generate the rhs. */
bbf6f052
RK
4088 {
4089 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4090 {
4091 /* Expand EXP into a new pseudo. */
4092 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4093 temp = expand_expr (exp, temp, GET_MODE (target),
4094 (want_value & 2
4095 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4096 }
4097 else
8403445a
AM
4098 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4099 (want_value & 2
4100 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4101
4102 /* If target is volatile, ANSI requires accessing the value
4103 *from* the target, if it is accessed. So make that happen.
4104 In no case return the target itself. */
8403445a 4105 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4106 dont_return_target = 1;
bbf6f052 4107 }
8403445a 4108 else if ((want_value & 1) != 0
3c0cb5de 4109 && MEM_P (target)
8403445a 4110 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4111 && GET_MODE (target) != BLKmode)
4112 /* If target is in memory and caller wants value in a register instead,
4113 arrange that. Pass TARGET as target for expand_expr so that,
4114 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4115 We know expand_expr will not use the target in that case.
4116 Don't do this if TARGET is volatile because we are supposed
4117 to write it and then read it. */
4118 {
8403445a
AM
4119 temp = expand_expr (exp, target, GET_MODE (target),
4120 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4121 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4122 {
4123 /* If TEMP is already in the desired TARGET, only copy it from
4124 memory and don't store it there again. */
4125 if (temp == target
4126 || (rtx_equal_p (temp, target)
4127 && ! side_effects_p (temp) && ! side_effects_p (target)))
4128 dont_store_target = 1;
4129 temp = copy_to_reg (temp);
4130 }
12f06d17
CH
4131 dont_return_target = 1;
4132 }
1499e0a8 4133 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4134 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4135 than the declared mode, compute the result into its declared mode
4136 and then convert to the wider mode. Our value is the computed
4137 expression. */
4138 {
b76b08ef
RK
4139 rtx inner_target = 0;
4140
5a32d038 4141 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4142 which will often result in some optimizations. Do the conversion
4143 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4144 the extend. But don't do this if the type of EXP is a subtype
4145 of something else since then the conversion might involve
4146 more than just converting modes. */
8403445a
AM
4147 if ((want_value & 1) == 0
4148 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4149 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d 4150 {
8df83eae 4151 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4152 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4153 exp = convert
ae2bcd98 4154 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4155 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4156
ae2bcd98 4157 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4158 (GET_MODE (SUBREG_REG (target)),
4159 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4160 exp);
b76b08ef
RK
4161
4162 inner_target = SUBREG_REG (target);
f635a84d 4163 }
3a94c984 4164
8403445a
AM
4165 temp = expand_expr (exp, inner_target, VOIDmode,
4166 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4167
7abec5be 4168 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4169 now so it gets done only once. Strictly speaking, this is
4170 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4171 overlaps TARGET. But not performing the load twice also
4172 reduces the amount of rtl we generate and then have to CSE. */
3c0cb5de 4173 if (MEM_P (temp) && (want_value & 1) != 0)
766f36c7
RK
4174 temp = copy_to_reg (temp);
4175
b258707c
RS
4176 /* If TEMP is a VOIDmode constant, use convert_modes to make
4177 sure that we properly convert it. */
4178 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4179 {
4180 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4181 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4182 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4183 GET_MODE (target), temp,
4184 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 }
b258707c 4186
1499e0a8
RK
4187 convert_move (SUBREG_REG (target), temp,
4188 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4189
4190 /* If we promoted a constant, change the mode back down to match
4191 target. Otherwise, the caller might get confused by a result whose
4192 mode is larger than expected. */
4193
8403445a 4194 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4195 {
b3ca30df
JJ
4196 if (GET_MODE (temp) != VOIDmode)
4197 {
4198 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4199 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4200 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4201 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4202 }
4203 else
4204 temp = convert_modes (GET_MODE (target),
4205 GET_MODE (SUBREG_REG (target)),
4206 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4207 }
4208
8403445a 4209 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4210 }
bbf6f052
RK
4211 else
4212 {
0fab64a3
MM
4213 temp = expand_expr_real (exp, target, GET_MODE (target),
4214 (want_value & 2
4215 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4216 &alt_rtl);
766f36c7 4217 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4218 If TARGET is a volatile mem ref, either return TARGET
4219 or return a reg copied *from* TARGET; ANSI requires this.
4220
4221 Otherwise, if TEMP is not TARGET, return TEMP
4222 if it is constant (for efficiency),
4223 or if we really want the correct value. */
f8cfc6aa 4224 if (!(target && REG_P (target)
bbf6f052 4225 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 4226 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 4227 && ! rtx_equal_p (temp, target)
8403445a 4228 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4229 dont_return_target = 1;
4230 }
4231
b258707c
RS
4232 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4233 the same as that of TARGET, adjust the constant. This is needed, for
4234 example, in case it is a CONST_DOUBLE and we want only a word-sized
4235 value. */
4236 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4237 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4238 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4239 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4240 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4241
bbf6f052 4242 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4243 Convert the value to TARGET's type first if necessary and emit the
4244 pending incrementations that have been queued when expanding EXP.
4245 Note that we cannot emit the whole queue blindly because this will
4246 effectively disable the POST_INC optimization later.
4247
37a08a29 4248 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4249 one or both of them are volatile memory refs, we have to distinguish
4250 two cases:
4251 - expand_expr has used TARGET. In this case, we must not generate
4252 another copy. This can be detected by TARGET being equal according
4253 to == .
4254 - expand_expr has not used TARGET - that means that the source just
4255 happens to have the same RTX form. Since temp will have been created
4256 by expand_expr, it will compare unequal according to == .
4257 We must generate a copy in this case, to reach the correct number
4258 of volatile memory references. */
bbf6f052 4259
6036acbb 4260 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4261 || (temp != target && (side_effects_p (temp)
4262 || side_effects_p (target))))
e5408e52 4263 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4264 && ! dont_store_target
9c5c5f2c
MM
4265 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4266 but TARGET is not valid memory reference, TEMP will differ
4267 from TARGET although it is really the same location. */
0fab64a3 4268 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4269 /* If there's nothing to copy, don't bother. Don't call expr_size
4270 unless necessary, because some front-ends (C++) expr_size-hook
4271 aborts on objects that are not supposed to be bit-copied or
4272 bit-initialized. */
4273 && expr_size (exp) != const0_rtx)
bbf6f052 4274 {
1bbd65cd 4275 emit_insns_enqueued_after_mark (mark);
bbf6f052 4276 target = protect_from_queue (target, 1);
e6d55fd7 4277 temp = protect_from_queue (temp, 0);
bbf6f052 4278 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4279 && GET_MODE (temp) != VOIDmode)
bbf6f052 4280 {
8df83eae 4281 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4282 if (dont_return_target)
4283 {
4284 /* In this case, we will return TEMP,
4285 so make sure it has the proper mode.
4286 But don't forget to store the value into TARGET. */
4287 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4288 emit_move_insn (target, temp);
4289 }
4290 else
4291 convert_move (target, temp, unsignedp);
4292 }
4293
4294 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4295 {
c24ae149
RK
4296 /* Handle copying a string constant into an array. The string
4297 constant may be shorter than the array. So copy just the string's
4298 actual length, and clear the rest. First get the size of the data
4299 type of the string, which is actually the size of the target. */
4300 rtx size = expr_size (exp);
bbf6f052 4301
e87b4f3f
RS
4302 if (GET_CODE (size) == CONST_INT
4303 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4304 emit_block_move (target, temp, size,
4305 (want_value & 2
4306 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4307 else
bbf6f052 4308 {
e87b4f3f
RS
4309 /* Compute the size of the data to copy from the string. */
4310 tree copy_size
c03b7665 4311 = size_binop (MIN_EXPR,
b50d17a1 4312 make_tree (sizetype, size),
fed3cef0 4313 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4314 rtx copy_size_rtx
4315 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4316 (want_value & 2
4317 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4318 rtx label = 0;
4319
4320 /* Copy that much. */
267b28bd 4321 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4322 TYPE_UNSIGNED (sizetype));
8403445a
AM
4323 emit_block_move (target, temp, copy_size_rtx,
4324 (want_value & 2
4325 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4326
88f63c77
RK
4327 /* Figure out how much is left in TARGET that we have to clear.
4328 Do all calculations in ptr_mode. */
e87b4f3f
RS
4329 if (GET_CODE (copy_size_rtx) == CONST_INT)
4330 {
c24ae149
RK
4331 size = plus_constant (size, -INTVAL (copy_size_rtx));
4332 target = adjust_address (target, BLKmode,
4333 INTVAL (copy_size_rtx));
e87b4f3f
RS
4334 }
4335 else
4336 {
fa06ab5c 4337 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4338 copy_size_rtx, NULL_RTX, 0,
4339 OPTAB_LIB_WIDEN);
e87b4f3f 4340
c24ae149
RK
4341#ifdef POINTERS_EXTEND_UNSIGNED
4342 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4343 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4344 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4345#endif
4346
4347 target = offset_address (target, copy_size_rtx,
4348 highest_pow2_factor (copy_size));
e87b4f3f 4349 label = gen_label_rtx ();
c5d5d461 4350 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4351 GET_MODE (size), 0, label);
e87b4f3f
RS
4352 }
4353
4354 if (size != const0_rtx)
37a08a29 4355 clear_storage (target, size);
22619c3f 4356
e87b4f3f
RS
4357 if (label)
4358 emit_label (label);
bbf6f052
RK
4359 }
4360 }
fffa9c1d
JW
4361 /* Handle calls that return values in multiple non-contiguous locations.
4362 The Irix 6 ABI has examples of this. */
4363 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4364 emit_group_load (target, temp, TREE_TYPE (exp),
4365 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4366 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4367 emit_block_move (target, temp, expr_size (exp),
4368 (want_value & 2
4369 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4370 else
b0dccb00
RH
4371 {
4372 temp = force_operand (temp, target);
4373 if (temp != target)
4374 emit_move_insn (target, temp);
4375 }
bbf6f052 4376 }
709f5be1 4377
766f36c7 4378 /* If we don't want a value, return NULL_RTX. */
8403445a 4379 if ((want_value & 1) == 0)
766f36c7
RK
4380 return NULL_RTX;
4381
4382 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4383 ??? The latter test doesn't seem to make sense. */
3c0cb5de 4384 else if (dont_return_target && !MEM_P (temp))
bbf6f052 4385 return temp;
766f36c7
RK
4386
4387 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4388 else if ((want_value & 1) != 0
4389 && GET_MODE (target) != BLKmode
f8cfc6aa 4390 && ! (REG_P (target)
766f36c7 4391 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4392 return copy_to_reg (target);
3a94c984 4393
766f36c7 4394 else
709f5be1 4395 return target;
bbf6f052
RK
4396}
4397\f
1ea7e6ad 4398/* Examine CTOR. Discover how many scalar fields are set to nonzero
6de9cd9a
DN
4399 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4400 are set to non-constant values and place it in *P_NC_ELTS. */
9de08200 4401
6de9cd9a
DN
4402static void
4403categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4404 HOST_WIDE_INT *p_nc_elts)
9de08200 4405{
6de9cd9a
DN
4406 HOST_WIDE_INT nz_elts, nc_elts;
4407 tree list;
9de08200 4408
6de9cd9a
DN
4409 nz_elts = 0;
4410 nc_elts = 0;
4411
4412 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
9de08200 4413 {
6de9cd9a
DN
4414 tree value = TREE_VALUE (list);
4415 tree purpose = TREE_PURPOSE (list);
4416 HOST_WIDE_INT mult;
9de08200 4417
6de9cd9a
DN
4418 mult = 1;
4419 if (TREE_CODE (purpose) == RANGE_EXPR)
4420 {
4421 tree lo_index = TREE_OPERAND (purpose, 0);
4422 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4423
6de9cd9a
DN
4424 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4425 mult = (tree_low_cst (hi_index, 1)
4426 - tree_low_cst (lo_index, 1) + 1);
4427 }
9de08200 4428
6de9cd9a
DN
4429 switch (TREE_CODE (value))
4430 {
4431 case CONSTRUCTOR:
4432 {
4433 HOST_WIDE_INT nz = 0, nc = 0;
4434 categorize_ctor_elements_1 (value, &nz, &nc);
4435 nz_elts += mult * nz;
4436 nc_elts += mult * nc;
4437 }
4438 break;
9de08200 4439
6de9cd9a
DN
4440 case INTEGER_CST:
4441 case REAL_CST:
4442 if (!initializer_zerop (value))
4443 nz_elts += mult;
4444 break;
4445 case COMPLEX_CST:
4446 if (!initializer_zerop (TREE_REALPART (value)))
4447 nz_elts += mult;
4448 if (!initializer_zerop (TREE_IMAGPART (value)))
4449 nz_elts += mult;
4450 break;
4451 case VECTOR_CST:
4452 {
4453 tree v;
4454 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4455 if (!initializer_zerop (TREE_VALUE (v)))
4456 nz_elts += mult;
4457 }
4458 break;
69ef87e2 4459
6de9cd9a
DN
4460 default:
4461 nz_elts += mult;
4462 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4463 nc_elts += mult;
4464 break;
4465 }
4466 }
69ef87e2 4467
6de9cd9a
DN
4468 *p_nz_elts += nz_elts;
4469 *p_nc_elts += nc_elts;
4470}
4471
4472void
4473categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4474 HOST_WIDE_INT *p_nc_elts)
4475{
4476 *p_nz_elts = 0;
4477 *p_nc_elts = 0;
4478 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4479}
4480
4481/* Count the number of scalars in TYPE. Return -1 on overflow or
4482 variable-sized. */
4483
4484HOST_WIDE_INT
4485count_type_elements (tree type)
4486{
4487 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4488 switch (TREE_CODE (type))
4489 {
4490 case ARRAY_TYPE:
4491 {
4492 tree telts = array_type_nelts (type);
4493 if (telts && host_integerp (telts, 1))
4494 {
5377d5ba 4495 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
6de9cd9a
DN
4496 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4497 if (n == 0)
4498 return 0;
5377d5ba 4499 else if (max / n > m)
6de9cd9a
DN
4500 return n * m;
4501 }
4502 return -1;
4503 }
4504
4505 case RECORD_TYPE:
4506 {
4507 HOST_WIDE_INT n = 0, t;
4508 tree f;
4509
4510 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4511 if (TREE_CODE (f) == FIELD_DECL)
4512 {
4513 t = count_type_elements (TREE_TYPE (f));
4514 if (t < 0)
4515 return -1;
4516 n += t;
4517 }
4518
4519 return n;
4520 }
9de08200 4521
6de9cd9a
DN
4522 case UNION_TYPE:
4523 case QUAL_UNION_TYPE:
4524 {
4525 /* Ho hum. How in the world do we guess here? Clearly it isn't
4526 right to count the fields. Guess based on the number of words. */
4527 HOST_WIDE_INT n = int_size_in_bytes (type);
4528 if (n < 0)
4529 return -1;
4530 return n / UNITS_PER_WORD;
4531 }
4532
4533 case COMPLEX_TYPE:
4534 return 2;
4535
4536 case VECTOR_TYPE:
4537 /* ??? This is broke. We should encode the vector width in the tree. */
4538 return GET_MODE_NUNITS (TYPE_MODE (type));
4539
4540 case INTEGER_TYPE:
4541 case REAL_TYPE:
4542 case ENUMERAL_TYPE:
4543 case BOOLEAN_TYPE:
4544 case CHAR_TYPE:
4545 case POINTER_TYPE:
4546 case OFFSET_TYPE:
4547 case REFERENCE_TYPE:
9de08200 4548 return 1;
3a94c984 4549
6de9cd9a
DN
4550 case VOID_TYPE:
4551 case METHOD_TYPE:
4552 case FILE_TYPE:
4553 case SET_TYPE:
4554 case FUNCTION_TYPE:
4555 case LANG_TYPE:
e9a25f70 4556 default:
6de9cd9a 4557 abort ();
9de08200 4558 }
9de08200
RK
4559}
4560
4561/* Return 1 if EXP contains mostly (3/4) zeros. */
4562
40209195 4563int
502b8322 4564mostly_zeros_p (tree exp)
9de08200 4565{
9de08200 4566 if (TREE_CODE (exp) == CONSTRUCTOR)
6de9cd9a 4567
9de08200 4568 {
6de9cd9a
DN
4569 HOST_WIDE_INT nz_elts, nc_elts, elts;
4570
4571 /* If there are no ranges of true bits, it is all zero. */
e1a43f73 4572 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
6de9cd9a
DN
4573 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4574
4575 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4576 elts = count_type_elements (TREE_TYPE (exp));
9de08200 4577
6de9cd9a 4578 return nz_elts < elts / 4;
9de08200
RK
4579 }
4580
6de9cd9a 4581 return initializer_zerop (exp);
9de08200
RK
4582}
4583\f
e1a43f73
PB
4584/* Helper function for store_constructor.
4585 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4586 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4587 CLEARED is as for store_constructor.
23cb1766 4588 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4589
4590 This provides a recursive shortcut back to store_constructor when it isn't
4591 necessary to go through store_field. This is so that we can pass through
4592 the cleared field to let store_constructor know that we may not have to
4593 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4594
4595static void
502b8322
AJ
4596store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4597 HOST_WIDE_INT bitpos, enum machine_mode mode,
4598 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4599{
4600 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4601 /* We can only call store_constructor recursively if the size and
4602 bit position are on a byte boundary. */
23ccec44 4603 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4604 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4605 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4606 let store_field do the bitfield handling. This is unlikely to
4607 generate unnecessary clear instructions anyways. */
3c0cb5de 4608 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4609 {
3c0cb5de 4610 if (MEM_P (target))
61cb205c
RK
4611 target
4612 = adjust_address (target,
4613 GET_MODE (target) == BLKmode
4614 || 0 != (bitpos
4615 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4616 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4617
e0339ef7 4618
04050c69 4619 /* Update the alias set, if required. */
3c0cb5de 4620 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4621 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4622 {
4623 target = copy_rtx (target);
4624 set_mem_alias_set (target, alias_set);
4625 }
e0339ef7 4626
dbb5c281 4627 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4628 }
4629 else
a06ef755
RK
4630 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4631 alias_set);
e1a43f73
PB
4632}
4633
bbf6f052 4634/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4635 TARGET is either a REG or a MEM; we know it cannot conflict, since
4636 safe_from_p has been called.
dbb5c281
RK
4637 CLEARED is true if TARGET is known to have been zero'd.
4638 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4639 may not be the same as the size of EXP if we are assigning to a field
4640 which has been packed to exclude padding bits. */
bbf6f052
RK
4641
4642static void
502b8322 4643store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4644{
4af3895e 4645 tree type = TREE_TYPE (exp);
a5efcd63 4646#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4647 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4648#endif
4af3895e 4649
e44842fe
RK
4650 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4651 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4652 {
b3694847 4653 tree elt;
bbf6f052 4654
dbb5c281
RK
4655 /* If size is zero or the target is already cleared, do nothing. */
4656 if (size == 0 || cleared)
2c430630 4657 cleared = 1;
04050c69 4658 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4659 else if ((TREE_CODE (type) == UNION_TYPE
4660 || TREE_CODE (type) == QUAL_UNION_TYPE)
4661 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4662 /* If the constructor is empty, clear the union. */
a59f8640 4663 {
dbb5c281 4664 clear_storage (target, expr_size (exp));
04050c69 4665 cleared = 1;
a59f8640 4666 }
4af3895e
JVA
4667
4668 /* If we are building a static constructor into a register,
4669 set the initial value as zero so we can fold the value into
67225c15
RK
4670 a constant. But if more than one register is involved,
4671 this probably loses. */
f8cfc6aa 4672 else if (REG_P (target) && TREE_STATIC (exp)
67225c15 4673 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4674 {
04050c69 4675 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4676 cleared = 1;
4677 }
4678
4679 /* If the constructor has fewer fields than the structure
4680 or if we are initializing the structure to mostly zeros,
0d97bf4c 4681 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4682 register whose mode size isn't equal to SIZE since clear_storage
4683 can't handle this case. */
7c50e202
OH
4684 else if (size > 0
4685 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4686 || mostly_zeros_p (exp))
f8cfc6aa 4687 && (!REG_P (target)
dbb5c281 4688 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
04050c69 4689 == size)))
9de08200 4690 {
337f4314
RK
4691 rtx xtarget = target;
4692
4693 if (readonly_fields_p (type))
4694 {
4695 xtarget = copy_rtx (xtarget);
4696 RTX_UNCHANGING_P (xtarget) = 1;
4697 }
4698
dbb5c281 4699 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4700 cleared = 1;
4701 }
dbb5c281
RK
4702
4703 if (! cleared)
4704 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4705
4706 /* Store each element of the constructor into
4707 the corresponding field of TARGET. */
4708
4709 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4710 {
b3694847 4711 tree field = TREE_PURPOSE (elt);
34c73909 4712 tree value = TREE_VALUE (elt);
b3694847 4713 enum machine_mode mode;
770ae6cc
RK
4714 HOST_WIDE_INT bitsize;
4715 HOST_WIDE_INT bitpos = 0;
770ae6cc 4716 tree offset;
b50d17a1 4717 rtx to_rtx = target;
bbf6f052 4718
f32fd778
RS
4719 /* Just ignore missing fields.
4720 We cleared the whole structure, above,
4721 if any fields are missing. */
4722 if (field == 0)
4723 continue;
4724
6de9cd9a 4725 if (cleared && initializer_zerop (value))
e1a43f73 4726 continue;
9de08200 4727
770ae6cc
RK
4728 if (host_integerp (DECL_SIZE (field), 1))
4729 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4730 else
4731 bitsize = -1;
4732
bbf6f052
RK
4733 mode = DECL_MODE (field);
4734 if (DECL_BIT_FIELD (field))
4735 mode = VOIDmode;
4736
770ae6cc
RK
4737 offset = DECL_FIELD_OFFSET (field);
4738 if (host_integerp (offset, 0)
4739 && host_integerp (bit_position (field), 0))
4740 {
4741 bitpos = int_bit_position (field);
4742 offset = 0;
4743 }
b50d17a1 4744 else
770ae6cc 4745 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4746
b50d17a1
RK
4747 if (offset)
4748 {
4749 rtx offset_rtx;
4750
6fce44af
RK
4751 offset
4752 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4753 make_tree (TREE_TYPE (exp),
4754 target));
bbf6f052 4755
b50d17a1 4756 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3c0cb5de 4757 if (!MEM_P (to_rtx))
b50d17a1
RK
4758 abort ();
4759
bd070e1a 4760#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4761 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4762 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4763#else
4764 if (GET_MODE (offset_rtx) != ptr_mode)
4765 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4766#endif
bd070e1a 4767
0d4903b8
RK
4768 to_rtx = offset_address (to_rtx, offset_rtx,
4769 highest_pow2_factor (offset));
b50d17a1 4770 }
c5c76735 4771
4e44c1ef 4772 if (TREE_READONLY (field))
cf04eb80 4773 {
3c0cb5de 4774 if (MEM_P (to_rtx))
effbcc6a
RK
4775 to_rtx = copy_rtx (to_rtx);
4776
cf04eb80
RK
4777 RTX_UNCHANGING_P (to_rtx) = 1;
4778 }
4779
34c73909
R
4780#ifdef WORD_REGISTER_OPERATIONS
4781 /* If this initializes a field that is smaller than a word, at the
4782 start of a word, try to widen it to a full word.
4783 This special case allows us to output C++ member function
4784 initializations in a form that the optimizers can understand. */
f8cfc6aa 4785 if (REG_P (target)
34c73909
R
4786 && bitsize < BITS_PER_WORD
4787 && bitpos % BITS_PER_WORD == 0
4788 && GET_MODE_CLASS (mode) == MODE_INT
4789 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4790 && exp_size >= 0
4791 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4792 {
4793 tree type = TREE_TYPE (value);
04050c69 4794
34c73909
R
4795 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4796 {
ae2bcd98 4797 type = lang_hooks.types.type_for_size
8df83eae 4798 (BITS_PER_WORD, TYPE_UNSIGNED (type));
34c73909
R
4799 value = convert (type, value);
4800 }
04050c69 4801
34c73909
R
4802 if (BYTES_BIG_ENDIAN)
4803 value
4804 = fold (build (LSHIFT_EXPR, type, value,
4805 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4806 bitsize = BITS_PER_WORD;
4807 mode = word_mode;
4808 }
4809#endif
10b76d73 4810
3c0cb5de 4811 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
10b76d73
RK
4812 && DECL_NONADDRESSABLE_P (field))
4813 {
4814 to_rtx = copy_rtx (to_rtx);
4815 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4816 }
4817
c5c76735 4818 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4819 value, type, cleared,
10b76d73 4820 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4821 }
4822 }
e6834654
SS
4823 else if (TREE_CODE (type) == ARRAY_TYPE
4824 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4825 {
b3694847
SS
4826 tree elt;
4827 int i;
e1a43f73 4828 int need_to_clear;
5c5214a9 4829 tree domain;
4af3895e 4830 tree elttype = TREE_TYPE (type);
e6834654 4831 int const_bounds_p;
ae0ed63a
JM
4832 HOST_WIDE_INT minelt = 0;
4833 HOST_WIDE_INT maxelt = 0;
997404de
JH
4834 int icode = 0;
4835 rtx *vector = NULL;
4836 int elt_size = 0;
4837 unsigned n_elts = 0;
85f3d674 4838
5c5214a9
ZW
4839 if (TREE_CODE (type) == ARRAY_TYPE)
4840 domain = TYPE_DOMAIN (type);
4841 else
4842 /* Vectors do not have domains; look up the domain of
4843 the array embedded in the debug representation type.
4844 FIXME Would probably be more efficient to treat vectors
4845 separately from arrays. */
e6834654 4846 {
e6834654
SS
4847 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4848 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4849 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4850 {
4851 enum machine_mode mode = GET_MODE (target);
4852
4853 icode = (int) vec_init_optab->handlers[mode].insn_code;
4854 if (icode != CODE_FOR_nothing)
4855 {
4856 unsigned int i;
4857
4858 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4859 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4860 vector = alloca (n_elts);
4861 for (i = 0; i < n_elts; i++)
4862 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4863 }
4864 }
e6834654
SS
4865 }
4866
4867 const_bounds_p = (TYPE_MIN_VALUE (domain)
4868 && TYPE_MAX_VALUE (domain)
4869 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4870 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4871
85f3d674
RK
4872 /* If we have constant bounds for the range of the type, get them. */
4873 if (const_bounds_p)
4874 {
4875 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4876 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4877 }
bbf6f052 4878
e1a43f73 4879 /* If the constructor has fewer elements than the array,
38e01259 4880 clear the whole array first. Similarly if this is
e1a43f73 4881 static constructor of a non-BLKmode object. */
f8cfc6aa 4882 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
e1a43f73
PB
4883 need_to_clear = 1;
4884 else
4885 {
4886 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4887 need_to_clear = ! const_bounds_p;
4888
e1a43f73
PB
4889 /* This loop is a more accurate version of the loop in
4890 mostly_zeros_p (it handles RANGE_EXPR in an index).
4891 It is also needed to check for missing elements. */
4892 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4893 elt != NULL_TREE && ! need_to_clear;
df0faff1 4894 elt = TREE_CHAIN (elt))
e1a43f73
PB
4895 {
4896 tree index = TREE_PURPOSE (elt);
4897 HOST_WIDE_INT this_node_count;
19caa751 4898
e1a43f73
PB
4899 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4900 {
4901 tree lo_index = TREE_OPERAND (index, 0);
4902 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4903
19caa751
RK
4904 if (! host_integerp (lo_index, 1)
4905 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4906 {
4907 need_to_clear = 1;
4908 break;
4909 }
19caa751
RK
4910
4911 this_node_count = (tree_low_cst (hi_index, 1)
4912 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4913 }
4914 else
4915 this_node_count = 1;
85f3d674 4916
e1a43f73
PB
4917 count += this_node_count;
4918 if (mostly_zeros_p (TREE_VALUE (elt)))
4919 zero_count += this_node_count;
4920 }
85f3d674 4921
8e958f70 4922 /* Clear the entire array first if there are any missing elements,
0f41302f 4923 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4924 if (! need_to_clear
4925 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4926 need_to_clear = 1;
4927 }
85f3d674 4928
997404de 4929 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4930 {
4931 if (! cleared)
725e58b1
RK
4932 {
4933 if (REG_P (target))
4934 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4935 else
dbb5c281 4936 clear_storage (target, GEN_INT (size));
725e58b1 4937 }
dbb5c281 4938 cleared = 1;
9de08200 4939 }
df4556a3 4940 else if (REG_P (target))
dbb5c281
RK
4941 /* Inform later passes that the old value is dead. */
4942 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4943
4944 /* Store each element of the constructor into
4945 the corresponding element of TARGET, determined
4946 by counting the elements. */
4947 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4948 elt;
4949 elt = TREE_CHAIN (elt), i++)
4950 {
b3694847 4951 enum machine_mode mode;
19caa751
RK
4952 HOST_WIDE_INT bitsize;
4953 HOST_WIDE_INT bitpos;
bbf6f052 4954 int unsignedp;
e1a43f73 4955 tree value = TREE_VALUE (elt);
03dc44a6
RS
4956 tree index = TREE_PURPOSE (elt);
4957 rtx xtarget = target;
bbf6f052 4958
6de9cd9a 4959 if (cleared && initializer_zerop (value))
e1a43f73 4960 continue;
9de08200 4961
8df83eae 4962 unsignedp = TYPE_UNSIGNED (elttype);
14a774a9
RK
4963 mode = TYPE_MODE (elttype);
4964 if (mode == BLKmode)
19caa751
RK
4965 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4966 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4967 : -1);
14a774a9
RK
4968 else
4969 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4970
e1a43f73
PB
4971 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4972 {
4973 tree lo_index = TREE_OPERAND (index, 0);
4974 tree hi_index = TREE_OPERAND (index, 1);
6af8eb57 4975 rtx index_r, pos_rtx;
05c0b405
PB
4976 HOST_WIDE_INT lo, hi, count;
4977 tree position;
e1a43f73 4978
997404de
JH
4979 if (vector)
4980 abort ();
4981
0f41302f 4982 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4983 if (const_bounds_p
4984 && host_integerp (lo_index, 0)
19caa751
RK
4985 && host_integerp (hi_index, 0)
4986 && (lo = tree_low_cst (lo_index, 0),
4987 hi = tree_low_cst (hi_index, 0),
05c0b405 4988 count = hi - lo + 1,
3c0cb5de 4989 (!MEM_P (target)
05c0b405 4990 || count <= 2
19caa751
RK
4991 || (host_integerp (TYPE_SIZE (elttype), 1)
4992 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4993 <= 40 * 8)))))
e1a43f73 4994 {
05c0b405
PB
4995 lo -= minelt; hi -= minelt;
4996 for (; lo <= hi; lo++)
e1a43f73 4997 {
19caa751 4998 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73 4999
3c0cb5de 5000 if (MEM_P (target)
10b76d73 5001 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5002 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5003 && TYPE_NONALIASED_COMPONENT (type))
5004 {
5005 target = copy_rtx (target);
5006 MEM_KEEP_ALIAS_SET_P (target) = 1;
5007 }
5008
23cb1766 5009 store_constructor_field
04050c69
RK
5010 (target, bitsize, bitpos, mode, value, type, cleared,
5011 get_alias_set (elttype));
e1a43f73
PB
5012 }
5013 }
5014 else
5015 {
6af8eb57
SB
5016 rtx loop_start = gen_label_rtx ();
5017 rtx loop_end = gen_label_rtx ();
5018 tree exit_cond;
e1a43f73 5019
6af8eb57 5020 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
8df83eae 5021 unsignedp = TYPE_UNSIGNED (domain);
e1a43f73
PB
5022
5023 index = build_decl (VAR_DECL, NULL_TREE, domain);
5024
19e7881c 5025 index_r
e1a43f73
PB
5026 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5027 &unsignedp, 0));
19e7881c 5028 SET_DECL_RTL (index, index_r);
e1a43f73 5029 store_expr (lo_index, index_r, 0);
6af8eb57
SB
5030
5031 /* Build the head of the loop. */
5032 do_pending_stack_adjust ();
5033 emit_queue ();
5034 emit_label (loop_start);
e1a43f73 5035
0f41302f 5036 /* Assign value to element index. */
fed3cef0
RK
5037 position
5038 = convert (ssizetype,
5039 fold (build (MINUS_EXPR, TREE_TYPE (index),
5040 index, TYPE_MIN_VALUE (domain))));
5041 position = size_binop (MULT_EXPR, position,
5042 convert (ssizetype,
5043 TYPE_SIZE_UNIT (elttype)));
5044
e1a43f73 5045 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5046 xtarget = offset_address (target, pos_rtx,
5047 highest_pow2_factor (position));
5048 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5049 if (TREE_CODE (value) == CONSTRUCTOR)
dbb5c281
RK
5050 store_constructor (value, xtarget, cleared,
5051 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5052 else
5053 store_expr (value, xtarget, 0);
5054
6af8eb57
SB
5055 /* Generate a conditional jump to exit the loop. */
5056 exit_cond = build (LT_EXPR, integer_type_node,
5057 index, hi_index);
5058 jumpif (exit_cond, loop_end);
e1a43f73 5059
6af8eb57
SB
5060 /* Update the loop counter, and jump to the head of
5061 the loop. */
e1a43f73
PB
5062 expand_increment (build (PREINCREMENT_EXPR,
5063 TREE_TYPE (index),
7b8b9722 5064 index, integer_one_node), 0, 0);
6af8eb57
SB
5065 emit_jump (loop_start);
5066
5067 /* Build the end of the loop. */
e1a43f73 5068 emit_label (loop_end);
e1a43f73
PB
5069 }
5070 }
19caa751
RK
5071 else if ((index != 0 && ! host_integerp (index, 0))
5072 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5073 {
03dc44a6
RS
5074 tree position;
5075
997404de
JH
5076 if (vector)
5077 abort ();
5078
5b6c44ff 5079 if (index == 0)
fed3cef0 5080 index = ssize_int (1);
5b6c44ff 5081
e1a43f73 5082 if (minelt)
fed3cef0
RK
5083 index = convert (ssizetype,
5084 fold (build (MINUS_EXPR, index,
5085 TYPE_MIN_VALUE (domain))));
19caa751 5086
fed3cef0
RK
5087 position = size_binop (MULT_EXPR, index,
5088 convert (ssizetype,
5089 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5090 xtarget = offset_address (target,
5091 expand_expr (position, 0, VOIDmode, 0),
5092 highest_pow2_factor (position));
5093 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5094 store_expr (value, xtarget, 0);
03dc44a6 5095 }
997404de
JH
5096 else if (vector)
5097 {
5098 int pos;
5099
5100 if (index != 0)
5101 pos = tree_low_cst (index, 0) - minelt;
5102 else
5103 pos = i;
5104 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5105 }
03dc44a6
RS
5106 else
5107 {
5108 if (index != 0)
19caa751
RK
5109 bitpos = ((tree_low_cst (index, 0) - minelt)
5110 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5111 else
19caa751
RK
5112 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5113
3c0cb5de 5114 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5115 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5116 && TYPE_NONALIASED_COMPONENT (type))
5117 {
5118 target = copy_rtx (target);
5119 MEM_KEEP_ALIAS_SET_P (target) = 1;
5120 }
9b9bd3b2
JH
5121 store_constructor_field (target, bitsize, bitpos, mode, value,
5122 type, cleared, get_alias_set (elttype));
03dc44a6 5123 }
bbf6f052 5124 }
997404de
JH
5125 if (vector)
5126 {
5127 emit_insn (GEN_FCN (icode) (target,
5128 gen_rtx_PARALLEL (GET_MODE (target),
5129 gen_rtvec_v (n_elts, vector))));
5130 }
bbf6f052 5131 }
19caa751 5132
3a94c984 5133 /* Set constructor assignments. */
071a6595
PB
5134 else if (TREE_CODE (type) == SET_TYPE)
5135 {
e1a43f73 5136 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5137 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5138 tree domain = TYPE_DOMAIN (type);
5139 tree domain_min, domain_max, bitlength;
5140
9faa82d8 5141 /* The default implementation strategy is to extract the constant
071a6595
PB
5142 parts of the constructor, use that to initialize the target,
5143 and then "or" in whatever non-constant ranges we need in addition.
5144
5145 If a large set is all zero or all ones, it is
8f99553f 5146 probably better to set it using memset.
071a6595
PB
5147 Also, if a large set has just a single range, it may also be
5148 better to first clear all the first clear the set (using
8f99553f 5149 memset), and set the bits we want. */
3a94c984 5150
0f41302f 5151 /* Check for all zeros. */
9376fcd6 5152 if (elt == NULL_TREE && size > 0)
071a6595 5153 {
dbb5c281
RK
5154 if (!cleared)
5155 clear_storage (target, GEN_INT (size));
071a6595
PB
5156 return;
5157 }
5158
071a6595
PB
5159 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5160 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5161 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5162 size_diffop (domain_max, domain_min),
5163 ssize_int (1));
071a6595 5164
19caa751 5165 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5166
5167 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5168 are "complicated" (more than one range), initialize (the
3a94c984 5169 constant parts) by copying from a constant. */
e1a43f73
PB
5170 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5171 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5172 {
19caa751 5173 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5174 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5175 char *bit_buffer = alloca (nbits);
b4ee5a72 5176 HOST_WIDE_INT word = 0;
19caa751
RK
5177 unsigned int bit_pos = 0;
5178 unsigned int ibit = 0;
5179 unsigned int offset = 0; /* In bytes from beginning of set. */
5180
e1a43f73 5181 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5182 for (;;)
071a6595 5183 {
b4ee5a72
PB
5184 if (bit_buffer[ibit])
5185 {
b09f3348 5186 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5187 word |= (1 << (set_word_size - 1 - bit_pos));
5188 else
5189 word |= 1 << bit_pos;
5190 }
19caa751 5191
b4ee5a72
PB
5192 bit_pos++; ibit++;
5193 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5194 {
dbb5c281 5195 if (word != 0 || ! cleared)
e1a43f73 5196 {
053ee101 5197 rtx datum = gen_int_mode (word, mode);
e1a43f73 5198 rtx to_rtx;
19caa751 5199
0f41302f
MS
5200 /* The assumption here is that it is safe to use
5201 XEXP if the set is multi-word, but not if
5202 it's single-word. */
3c0cb5de 5203 if (MEM_P (target))
f4ef873c 5204 to_rtx = adjust_address (target, mode, offset);
3a94c984 5205 else if (offset == 0)
e1a43f73
PB
5206 to_rtx = target;
5207 else
5208 abort ();
5209 emit_move_insn (to_rtx, datum);
5210 }
19caa751 5211
b4ee5a72
PB
5212 if (ibit == nbits)
5213 break;
5214 word = 0;
5215 bit_pos = 0;
5216 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5217 }
5218 }
071a6595 5219 }
dbb5c281 5220 else if (!cleared)
19caa751
RK
5221 /* Don't bother clearing storage if the set is all ones. */
5222 if (TREE_CHAIN (elt) != NULL_TREE
5223 || (TREE_PURPOSE (elt) == NULL_TREE
5224 ? nbits != 1
5225 : ( ! host_integerp (TREE_VALUE (elt), 0)
5226 || ! host_integerp (TREE_PURPOSE (elt), 0)
5227 || (tree_low_cst (TREE_VALUE (elt), 0)
5228 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5229 != (HOST_WIDE_INT) nbits))))
dbb5c281 5230 clear_storage (target, expr_size (exp));
3a94c984 5231
e1a43f73 5232 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5233 {
3a94c984 5234 /* Start of range of element or NULL. */
071a6595 5235 tree startbit = TREE_PURPOSE (elt);
3a94c984 5236 /* End of range of element, or element value. */
071a6595
PB
5237 tree endbit = TREE_VALUE (elt);
5238 HOST_WIDE_INT startb, endb;
19caa751 5239 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5240
5241 bitlength_rtx = expand_expr (bitlength,
19caa751 5242 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5243
3a94c984 5244 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5245 if (startbit == NULL_TREE)
5246 {
5247 startbit = save_expr (endbit);
5248 endbit = startbit;
5249 }
19caa751 5250
071a6595
PB
5251 startbit = convert (sizetype, startbit);
5252 endbit = convert (sizetype, endbit);
5253 if (! integer_zerop (domain_min))
5254 {
5255 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5256 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5257 }
3a94c984 5258 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5259 EXPAND_CONST_ADDRESS);
3a94c984 5260 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5261 EXPAND_CONST_ADDRESS);
5262
5263 if (REG_P (target))
5264 {
1da68f56
RK
5265 targetx
5266 = assign_temp
ae2bcd98 5267 ((build_qualified_type (lang_hooks.types.type_for_mode
b0c48229 5268 (GET_MODE (target), 0),
1da68f56
RK
5269 TYPE_QUAL_CONST)),
5270 0, 1, 1);
071a6595
PB
5271 emit_move_insn (targetx, target);
5272 }
19caa751 5273
3c0cb5de 5274 else if (MEM_P (target))
071a6595
PB
5275 targetx = target;
5276 else
5277 abort ();
5278
4ca79136
RH
5279 /* Optimization: If startbit and endbit are constants divisible
5280 by BITS_PER_UNIT, call memset instead. */
8f99553f 5281 if (TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5282 && TREE_CODE (endbit) == INTEGER_CST
5283 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5284 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5285 {
ebb1b59a 5286 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5287 VOIDmode, 3,
e1a43f73
PB
5288 plus_constant (XEXP (targetx, 0),
5289 startb / BITS_PER_UNIT),
071a6595 5290 Pmode,
3b6f75e2 5291 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5292 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5293 TYPE_MODE (sizetype));
071a6595
PB
5294 }
5295 else
68d28100
RH
5296 emit_library_call (setbits_libfunc, LCT_NORMAL,
5297 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5298 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5299 startbit_rtx, TYPE_MODE (sizetype),
5300 endbit_rtx, TYPE_MODE (sizetype));
5301
071a6595
PB
5302 if (REG_P (target))
5303 emit_move_insn (target, targetx);
5304 }
5305 }
bbf6f052
RK
5306
5307 else
5308 abort ();
5309}
5310
5311/* Store the value of EXP (an expression tree)
5312 into a subfield of TARGET which has mode MODE and occupies
5313 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5314 If MODE is VOIDmode, it means that we are storing into a bit-field.
5315
5316 If VALUE_MODE is VOIDmode, return nothing in particular.
5317 UNSIGNEDP is not used in this case.
5318
5319 Otherwise, return an rtx for the value stored. This rtx
5320 has mode VALUE_MODE if that is convenient to do.
5321 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5322
a06ef755 5323 TYPE is the type of the underlying object,
ece32014
MM
5324
5325 ALIAS_SET is the alias set for the destination. This value will
5326 (in general) be different from that for TARGET, since TARGET is a
5327 reference to the containing structure. */
bbf6f052
RK
5328
5329static rtx
502b8322
AJ
5330store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5331 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5332 int unsignedp, tree type, int alias_set)
bbf6f052 5333{
906c4e36 5334 HOST_WIDE_INT width_mask = 0;
bbf6f052 5335
e9a25f70
JL
5336 if (TREE_CODE (exp) == ERROR_MARK)
5337 return const0_rtx;
5338
2be6a7e9
RK
5339 /* If we have nothing to store, do nothing unless the expression has
5340 side-effects. */
5341 if (bitsize == 0)
5342 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5343 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5344 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5345
5346 /* If we are storing into an unaligned field of an aligned union that is
5347 in a register, we may have the mode of TARGET being an integer mode but
5348 MODE == BLKmode. In that case, get an aligned object whose size and
5349 alignment are the same as TARGET and store TARGET into it (we can avoid
5350 the store if the field being stored is the entire width of TARGET). Then
5351 call ourselves recursively to store the field into a BLKmode version of
5352 that object. Finally, load from the object into TARGET. This is not
5353 very efficient in general, but should only be slightly more expensive
5354 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5355 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5356 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5357
5358 if (mode == BLKmode
f8cfc6aa 5359 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5360 {
85a43a2f 5361 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5362 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5363
8752c357 5364 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5365 emit_move_insn (object, target);
5366
a06ef755
RK
5367 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5368 alias_set);
bbf6f052
RK
5369
5370 emit_move_insn (target, object);
5371
a06ef755 5372 /* We want to return the BLKmode version of the data. */
46093b97 5373 return blk_object;
bbf6f052 5374 }
c3b247b4
JM
5375
5376 if (GET_CODE (target) == CONCAT)
5377 {
5378 /* We're storing into a struct containing a single __complex. */
5379
5380 if (bitpos != 0)
5381 abort ();
6de9cd9a 5382 return store_expr (exp, target, value_mode != VOIDmode);
c3b247b4 5383 }
bbf6f052
RK
5384
5385 /* If the structure is in a register or if the component
5386 is a bit field, we cannot use addressing to access it.
5387 Use bit-field techniques or SUBREG to store in it. */
5388
4fa52007 5389 if (mode == VOIDmode
6ab06cbb
JW
5390 || (mode != BLKmode && ! direct_store[(int) mode]
5391 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5392 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5393 || REG_P (target)
c980ac49 5394 || GET_CODE (target) == SUBREG
ccc98036
RS
5395 /* If the field isn't aligned enough to store as an ordinary memref,
5396 store it as a bit field. */
15b19a7d 5397 || (mode != BLKmode
9e5f281f
OH
5398 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5399 || bitpos % GET_MODE_ALIGNMENT (mode))
5400 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5401 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5402 /* If the RHS and field are a constant size and the size of the
5403 RHS isn't the same size as the bitfield, we must use bitfield
5404 operations. */
05bccae2
RK
5405 || (bitsize >= 0
5406 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5407 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5408 {
906c4e36 5409 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5410
ef19912d
RK
5411 /* If BITSIZE is narrower than the size of the type of EXP
5412 we will be narrowing TEMP. Normally, what's wanted are the
5413 low-order bits. However, if EXP's type is a record and this is
5414 big-endian machine, we want the upper BITSIZE bits. */
5415 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5416 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5417 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5418 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5419 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5420 - bitsize),
c1853da7 5421 NULL_RTX, 1);
ef19912d 5422
bbd6cf73
RK
5423 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5424 MODE. */
5425 if (mode != VOIDmode && mode != BLKmode
5426 && mode != TYPE_MODE (TREE_TYPE (exp)))
5427 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5428
a281e72d
RK
5429 /* If the modes of TARGET and TEMP are both BLKmode, both
5430 must be in memory and BITPOS must be aligned on a byte
5431 boundary. If so, we simply do a block copy. */
5432 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5433 {
3c0cb5de 5434 if (!MEM_P (target) || !MEM_P (temp)
a281e72d
RK
5435 || bitpos % BITS_PER_UNIT != 0)
5436 abort ();
5437
f4ef873c 5438 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5439 emit_block_move (target, temp,
a06ef755 5440 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5441 / BITS_PER_UNIT),
5442 BLOCK_OP_NORMAL);
a281e72d
RK
5443
5444 return value_mode == VOIDmode ? const0_rtx : target;
5445 }
5446
bbf6f052 5447 /* Store the value in the bitfield. */
a06ef755
RK
5448 store_bit_field (target, bitsize, bitpos, mode, temp,
5449 int_size_in_bytes (type));
5450
bbf6f052
RK
5451 if (value_mode != VOIDmode)
5452 {
04050c69
RK
5453 /* The caller wants an rtx for the value.
5454 If possible, avoid refetching from the bitfield itself. */
bbf6f052 5455 if (width_mask != 0
3c0cb5de 5456 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5c4d7cfb 5457 {
9074de27 5458 tree count;
5c4d7cfb 5459 enum machine_mode tmode;
86a2c12a 5460
5c4d7cfb 5461 tmode = GET_MODE (temp);
86a2c12a
RS
5462 if (tmode == VOIDmode)
5463 tmode = value_mode;
22273300
JJ
5464
5465 if (unsignedp)
5466 return expand_and (tmode, temp,
2496c7bd 5467 gen_int_mode (width_mask, tmode),
22273300
JJ
5468 NULL_RTX);
5469
5c4d7cfb
RS
5470 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5471 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5472 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5473 }
04050c69 5474
bbf6f052 5475 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5476 NULL_RTX, value_mode, VOIDmode,
a06ef755 5477 int_size_in_bytes (type));
bbf6f052
RK
5478 }
5479 return const0_rtx;
5480 }
5481 else
5482 {
5483 rtx addr = XEXP (target, 0);
a06ef755 5484 rtx to_rtx = target;
bbf6f052
RK
5485
5486 /* If a value is wanted, it must be the lhs;
5487 so make the address stable for multiple use. */
5488
f8cfc6aa 5489 if (value_mode != VOIDmode && !REG_P (addr)
bbf6f052
RK
5490 && ! CONSTANT_ADDRESS_P (addr)
5491 /* A frame-pointer reference is already stable. */
5492 && ! (GET_CODE (addr) == PLUS
5493 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5494 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5495 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5496 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5497
5498 /* Now build a reference to just the desired component. */
5499
a06ef755
RK
5500 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5501
5502 if (to_rtx == target)
5503 to_rtx = copy_rtx (to_rtx);
792760b9 5504
c6df88cb 5505 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5506 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5507 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5508
5509 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5510 }
5511}
5512\f
5513/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5514 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5515 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5516
5517 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5518 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5519 If the position of the field is variable, we store a tree
5520 giving the variable offset (in units) in *POFFSET.
5521 This offset is in addition to the bit position.
5522 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5523
5524 If any of the extraction expressions is volatile,
5525 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5526
5527 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5528 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5529 is redundant.
5530
5531 If the field describes a variable-sized object, *PMODE is set to
5532 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5533 this case, but the address of the object can be found. */
bbf6f052
RK
5534
5535tree
502b8322
AJ
5536get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5537 HOST_WIDE_INT *pbitpos, tree *poffset,
5538 enum machine_mode *pmode, int *punsignedp,
5539 int *pvolatilep)
bbf6f052
RK
5540{
5541 tree size_tree = 0;
5542 enum machine_mode mode = VOIDmode;
fed3cef0 5543 tree offset = size_zero_node;
770ae6cc 5544 tree bit_offset = bitsize_zero_node;
770ae6cc 5545 tree tem;
bbf6f052 5546
770ae6cc
RK
5547 /* First get the mode, signedness, and size. We do this from just the
5548 outermost expression. */
bbf6f052
RK
5549 if (TREE_CODE (exp) == COMPONENT_REF)
5550 {
5551 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5552 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5553 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5554
a150de29 5555 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5556 }
5557 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5558 {
5559 size_tree = TREE_OPERAND (exp, 1);
a150de29 5560 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5561 }
5562 else
5563 {
5564 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5565 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5566
ab87f8c8
JL
5567 if (mode == BLKmode)
5568 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5569 else
5570 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5571 }
3a94c984 5572
770ae6cc 5573 if (size_tree != 0)
bbf6f052 5574 {
770ae6cc 5575 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5576 mode = BLKmode, *pbitsize = -1;
5577 else
770ae6cc 5578 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5579 }
5580
5581 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5582 and find the ultimate containing object. */
bbf6f052
RK
5583 while (1)
5584 {
770ae6cc
RK
5585 if (TREE_CODE (exp) == BIT_FIELD_REF)
5586 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5587 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5588 {
770ae6cc 5589 tree field = TREE_OPERAND (exp, 1);
44de5aeb 5590 tree this_offset = component_ref_field_offset (exp);
bbf6f052 5591
e7f3c83f
RK
5592 /* If this field hasn't been filled in yet, don't go
5593 past it. This should only happen when folding expressions
5594 made during type construction. */
770ae6cc 5595 if (this_offset == 0)
e7f3c83f
RK
5596 break;
5597
7156dead 5598 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5599 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5600 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5601
a06ef755 5602 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5603 }
7156dead 5604
b4e3fabb
RK
5605 else if (TREE_CODE (exp) == ARRAY_REF
5606 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5607 {
742920c7 5608 tree index = TREE_OPERAND (exp, 1);
44de5aeb
RK
5609 tree low_bound = array_ref_low_bound (exp);
5610 tree unit_size = array_ref_element_size (exp);
742920c7 5611
770ae6cc
RK
5612 /* We assume all arrays have sizes that are a multiple of a byte.
5613 First subtract the lower bound, if any, in the type of the
5614 index, then convert to sizetype and multiply by the size of the
5615 array element. */
44de5aeb 5616 if (! integer_zerop (low_bound))
770ae6cc
RK
5617 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5618 index, low_bound));
f8dac6eb 5619
770ae6cc
RK
5620 offset = size_binop (PLUS_EXPR, offset,
5621 size_binop (MULT_EXPR,
5622 convert (sizetype, index),
7156dead 5623 unit_size));
bbf6f052 5624 }
7156dead 5625
c1853da7
RK
5626 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5627 conversions that don't change the mode, and all view conversions
5628 except those that need to "step up" the alignment. */
bbf6f052 5629 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5630 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5631 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5632 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5633 && STRICT_ALIGNMENT
5634 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5635 < BIGGEST_ALIGNMENT)
5636 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5637 || TYPE_ALIGN_OK (TREE_TYPE
5638 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5639 && ! ((TREE_CODE (exp) == NOP_EXPR
5640 || TREE_CODE (exp) == CONVERT_EXPR)
5641 && (TYPE_MODE (TREE_TYPE (exp))
5642 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5643 break;
7bb0943f
RS
5644
5645 /* If any reference in the chain is volatile, the effect is volatile. */
5646 if (TREE_THIS_VOLATILE (exp))
5647 *pvolatilep = 1;
839c4796 5648
bbf6f052
RK
5649 exp = TREE_OPERAND (exp, 0);
5650 }
5651
770ae6cc
RK
5652 /* If OFFSET is constant, see if we can return the whole thing as a
5653 constant bit position. Otherwise, split it up. */
5654 if (host_integerp (offset, 0)
5655 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5656 bitsize_unit_node))
5657 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5658 && host_integerp (tem, 0))
5659 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5660 else
5661 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5662
bbf6f052 5663 *pmode = mode;
bbf6f052
RK
5664 return exp;
5665}
921b3427 5666
44de5aeb
RK
5667/* Return a tree of sizetype representing the size, in bytes, of the element
5668 of EXP, an ARRAY_REF. */
5669
5670tree
5671array_ref_element_size (tree exp)
5672{
5673 tree aligned_size = TREE_OPERAND (exp, 3);
5674 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5675
5676 /* If a size was specified in the ARRAY_REF, it's the size measured
5677 in alignment units of the element type. So multiply by that value. */
5678 if (aligned_size)
5679 return size_binop (MULT_EXPR, aligned_size,
5680 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5681
5682 /* Otherwise, take the size from that of the element type. Substitute
5683 any PLACEHOLDER_EXPR that we have. */
5684 else
5685 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5686}
5687
5688/* Return a tree representing the lower bound of the array mentioned in
5689 EXP, an ARRAY_REF. */
5690
5691tree
5692array_ref_low_bound (tree exp)
5693{
5694 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5695
5696 /* If a lower bound is specified in EXP, use it. */
5697 if (TREE_OPERAND (exp, 2))
5698 return TREE_OPERAND (exp, 2);
5699
5700 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5701 substituting for a PLACEHOLDER_EXPR as needed. */
5702 if (domain_type && TYPE_MIN_VALUE (domain_type))
5703 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5704
5705 /* Otherwise, return a zero of the appropriate type. */
5706 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5707}
5708
5709/* Return a tree representing the offset, in bytes, of the field referenced
5710 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5711
5712tree
5713component_ref_field_offset (tree exp)
5714{
5715 tree aligned_offset = TREE_OPERAND (exp, 2);
5716 tree field = TREE_OPERAND (exp, 1);
5717
5718 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5719 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5720 value. */
5721 if (aligned_offset)
5722 return size_binop (MULT_EXPR, aligned_offset,
5723 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5724
5725 /* Otherwise, take the offset from that of the field. Substitute
5726 any PLACEHOLDER_EXPR that we have. */
5727 else
5728 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5729}
5730
ed239f5a
RK
5731/* Return 1 if T is an expression that get_inner_reference handles. */
5732
5733int
502b8322 5734handled_component_p (tree t)
ed239f5a
RK
5735{
5736 switch (TREE_CODE (t))
5737 {
5738 case BIT_FIELD_REF:
5739 case COMPONENT_REF:
5740 case ARRAY_REF:
5741 case ARRAY_RANGE_REF:
5742 case NON_LVALUE_EXPR:
5743 case VIEW_CONVERT_EXPR:
5744 return 1;
5745
1a8c4ca6
EB
5746 /* ??? Sure they are handled, but get_inner_reference may return
5747 a different PBITSIZE, depending upon whether the expression is
5748 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5749 case NOP_EXPR:
5750 case CONVERT_EXPR:
5751 return (TYPE_MODE (TREE_TYPE (t))
5752 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5753
5754 default:
5755 return 0;
5756 }
5757}
bbf6f052 5758\f
3fe44edd
RK
5759/* Given an rtx VALUE that may contain additions and multiplications, return
5760 an equivalent value that just refers to a register, memory, or constant.
5761 This is done by generating instructions to perform the arithmetic and
5762 returning a pseudo-register containing the value.
c45a13a6
RK
5763
5764 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5765
5766rtx
502b8322 5767force_operand (rtx value, rtx target)
bbf6f052 5768{
8a28dbcc 5769 rtx op1, op2;
bbf6f052 5770 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5771 rtx subtarget = get_subtarget (target);
8a28dbcc 5772 enum rtx_code code = GET_CODE (value);
bbf6f052 5773
50654f6c
ZD
5774 /* Check for subreg applied to an expression produced by loop optimizer. */
5775 if (code == SUBREG
f8cfc6aa 5776 && !REG_P (SUBREG_REG (value))
3c0cb5de 5777 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5778 {
5779 value = simplify_gen_subreg (GET_MODE (value),
5780 force_reg (GET_MODE (SUBREG_REG (value)),
5781 force_operand (SUBREG_REG (value),
5782 NULL_RTX)),
5783 GET_MODE (SUBREG_REG (value)),
5784 SUBREG_BYTE (value));
5785 code = GET_CODE (value);
5786 }
5787
8b015896 5788 /* Check for a PIC address load. */
8a28dbcc 5789 if ((code == PLUS || code == MINUS)
8b015896
RH
5790 && XEXP (value, 0) == pic_offset_table_rtx
5791 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5792 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5793 || GET_CODE (XEXP (value, 1)) == CONST))
5794 {
5795 if (!subtarget)
5796 subtarget = gen_reg_rtx (GET_MODE (value));
5797 emit_move_insn (subtarget, value);
5798 return subtarget;
5799 }
5800
8a28dbcc 5801 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5802 {
8a28dbcc
JH
5803 if (!target)
5804 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5805 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5806 code == ZERO_EXTEND);
5807 return target;
bbf6f052
RK
5808 }
5809
ec8e098d 5810 if (ARITHMETIC_P (value))
bbf6f052
RK
5811 {
5812 op2 = XEXP (value, 1);
f8cfc6aa 5813 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5814 subtarget = 0;
8a28dbcc 5815 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5816 {
8a28dbcc 5817 code = PLUS;
bbf6f052
RK
5818 op2 = negate_rtx (GET_MODE (value), op2);
5819 }
5820
5821 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5822 operand a PLUS of a virtual register and something else. In that
5823 case, we want to emit the sum of the virtual register and the
5824 constant first and then add the other value. This allows virtual
5825 register instantiation to simply modify the constant rather than
5826 creating another one around this addition. */
5827 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5828 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5829 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5830 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5831 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5832 {
8a28dbcc
JH
5833 rtx temp = expand_simple_binop (GET_MODE (value), code,
5834 XEXP (XEXP (value, 0), 0), op2,
5835 subtarget, 0, OPTAB_LIB_WIDEN);
5836 return expand_simple_binop (GET_MODE (value), code, temp,
5837 force_operand (XEXP (XEXP (value,
5838 0), 1), 0),
5839 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5840 }
3a94c984 5841
8a28dbcc
JH
5842 op1 = force_operand (XEXP (value, 0), subtarget);
5843 op2 = force_operand (op2, NULL_RTX);
5844 switch (code)
5845 {
5846 case MULT:
5847 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5848 case DIV:
5849 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5850 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5851 target, 1, OPTAB_LIB_WIDEN);
5852 else
5853 return expand_divmod (0,
5854 FLOAT_MODE_P (GET_MODE (value))
5855 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5856 GET_MODE (value), op1, op2, target, 0);
5857 break;
5858 case MOD:
5859 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5860 target, 0);
5861 break;
5862 case UDIV:
5863 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5864 target, 1);
5865 break;
5866 case UMOD:
5867 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5868 target, 1);
5869 break;
5870 case ASHIFTRT:
5871 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5872 target, 0, OPTAB_LIB_WIDEN);
5873 break;
5874 default:
5875 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5876 target, 1, OPTAB_LIB_WIDEN);
5877 }
5878 }
ec8e098d 5879 if (UNARY_P (value))
8a28dbcc
JH
5880 {
5881 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5882 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5883 }
34e81b5a
RK
5884
5885#ifdef INSN_SCHEDULING
5886 /* On machines that have insn scheduling, we want all memory reference to be
5887 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5888 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5889 && (GET_MODE_SIZE (GET_MODE (value))
5890 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5891 value
5892 = simplify_gen_subreg (GET_MODE (value),
5893 force_reg (GET_MODE (SUBREG_REG (value)),
5894 force_operand (SUBREG_REG (value),
5895 NULL_RTX)),
5896 GET_MODE (SUBREG_REG (value)),
5897 SUBREG_BYTE (value));
5898#endif
5899
bbf6f052
RK
5900 return value;
5901}
5902\f
bbf6f052 5903/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5904 EXP can reference X, which is being modified. TOP_P is nonzero if this
5905 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5906 for EXP, as opposed to a recursive call to this function.
5907
5908 It is always safe for this routine to return zero since it merely
5909 searches for optimization opportunities. */
bbf6f052 5910
8f17b5c5 5911int
502b8322 5912safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5913{
5914 rtx exp_rtl = 0;
5915 int i, nops;
5916
6676e72f
RK
5917 if (x == 0
5918 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5919 have no way of allocating temporaries of variable size
5920 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5921 So we assume here that something at a higher level has prevented a
f4510f37 5922 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5923 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5924 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5925 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5926 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5927 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5928 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5929 != INTEGER_CST)
1da68f56
RK
5930 && GET_MODE (x) == BLKmode)
5931 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 5932 || (MEM_P (x)
1da68f56
RK
5933 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5934 || (GET_CODE (XEXP (x, 0)) == PLUS
5935 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5936 return 1;
5937
5938 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5939 find the underlying pseudo. */
5940 if (GET_CODE (x) == SUBREG)
5941 {
5942 x = SUBREG_REG (x);
f8cfc6aa 5943 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
5944 return 0;
5945 }
5946
1da68f56 5947 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5948 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5949 {
5950 case 'd':
a9772b60 5951 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5952 break;
5953
5954 case 'c':
5955 return 1;
5956
5957 case 'x':
5958 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5959 {
5960 while (1)
5961 {
5962 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5963 return 0;
5964 exp = TREE_CHAIN (exp);
5965 if (!exp)
5966 return 1;
5967 if (TREE_CODE (exp) != TREE_LIST)
5968 return safe_from_p (x, exp, 0);
5969 }
5970 }
ff439b5f
CB
5971 else if (TREE_CODE (exp) == ERROR_MARK)
5972 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5973 else
5974 return 0;
5975
350fae66
RK
5976 case 's':
5977 /* The only case we look at here is the DECL_INITIAL inside a
5978 DECL_EXPR. */
5979 return (TREE_CODE (exp) != DECL_EXPR
5980 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5981 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5982 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5983
bbf6f052
RK
5984 case '2':
5985 case '<':
f8d4be57
CE
5986 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5987 return 0;
5d3cc252 5988 /* Fall through. */
f8d4be57
CE
5989
5990 case '1':
5991 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5992
5993 case 'e':
5994 case 'r':
5995 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5996 the expression. If it is set, we conflict iff we are that rtx or
5997 both are in memory. Otherwise, we check all operands of the
5998 expression recursively. */
5999
6000 switch (TREE_CODE (exp))
6001 {
6002 case ADDR_EXPR:
70072ed9
RK
6003 /* If the operand is static or we are static, we can't conflict.
6004 Likewise if we don't conflict with the operand at all. */
6005 if (staticp (TREE_OPERAND (exp, 0))
6006 || TREE_STATIC (exp)
6007 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6008 return 1;
6009
6010 /* Otherwise, the only way this can conflict is if we are taking
6011 the address of a DECL a that address if part of X, which is
6012 very rare. */
6013 exp = TREE_OPERAND (exp, 0);
6014 if (DECL_P (exp))
6015 {
6016 if (!DECL_RTL_SET_P (exp)
3c0cb5de 6017 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
6018 return 0;
6019 else
6020 exp_rtl = XEXP (DECL_RTL (exp), 0);
6021 }
6022 break;
bbf6f052
RK
6023
6024 case INDIRECT_REF:
3c0cb5de 6025 if (MEM_P (x)
1da68f56
RK
6026 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6027 get_alias_set (exp)))
bbf6f052
RK
6028 return 0;
6029 break;
6030
6031 case CALL_EXPR:
f9808f81
MM
6032 /* Assume that the call will clobber all hard registers and
6033 all of memory. */
f8cfc6aa 6034 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 6035 || MEM_P (x))
f9808f81 6036 return 0;
bbf6f052
RK
6037 break;
6038
bbf6f052 6039 case WITH_CLEANUP_EXPR:
5dab5552 6040 case CLEANUP_POINT_EXPR:
ac45df5d
RH
6041 /* Lowered by gimplify.c. */
6042 abort ();
6043
bbf6f052 6044 case SAVE_EXPR:
82c82743 6045 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 6046
8129842c
RS
6047 case BIND_EXPR:
6048 /* The only operand we look at is operand 1. The rest aren't
6049 part of the expression. */
e5e809f4 6050 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6051
e9a25f70
JL
6052 default:
6053 break;
bbf6f052
RK
6054 }
6055
6056 /* If we have an rtx, we do not need to scan our operands. */
6057 if (exp_rtl)
6058 break;
6059
8f17b5c5 6060 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6061 for (i = 0; i < nops; i++)
6062 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6063 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6064 return 0;
8f17b5c5
MM
6065
6066 /* If this is a language-specific tree code, it may require
6067 special handling. */
dbbbbf3b
JDA
6068 if ((unsigned int) TREE_CODE (exp)
6069 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6070 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6071 return 0;
bbf6f052
RK
6072 }
6073
6074 /* If we have an rtl, find any enclosed object. Then see if we conflict
6075 with it. */
6076 if (exp_rtl)
6077 {
6078 if (GET_CODE (exp_rtl) == SUBREG)
6079 {
6080 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 6081 if (REG_P (exp_rtl)
bbf6f052
RK
6082 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6083 return 0;
6084 }
6085
6086 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6087 are memory and they conflict. */
bbf6f052 6088 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 6089 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 6090 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6091 rtx_addr_varies_p)));
bbf6f052
RK
6092 }
6093
6094 /* If we reach here, it is safe. */
6095 return 1;
6096}
6097
01c8a7c8
RK
6098/* Subroutine of expand_expr: return rtx if EXP is a
6099 variable or parameter; else return 0. */
6100
6101static rtx
502b8322 6102var_rtx (tree exp)
01c8a7c8
RK
6103{
6104 STRIP_NOPS (exp);
6105 switch (TREE_CODE (exp))
6106 {
6107 case PARM_DECL:
6108 case VAR_DECL:
6109 return DECL_RTL (exp);
6110 default:
6111 return 0;
6112 }
6113}
14a774a9 6114\f
0d4903b8
RK
6115/* Return the highest power of two that EXP is known to be a multiple of.
6116 This is used in updating alignment of MEMs in array references. */
6117
9ceca302 6118static unsigned HOST_WIDE_INT
502b8322 6119highest_pow2_factor (tree exp)
0d4903b8 6120{
9ceca302 6121 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6122
6123 switch (TREE_CODE (exp))
6124 {
6125 case INTEGER_CST:
e0f1be5c
JJ
6126 /* We can find the lowest bit that's a one. If the low
6127 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6128 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6129 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6130 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6131 later ICE. */
e0f1be5c 6132 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6133 return BIGGEST_ALIGNMENT;
e0f1be5c 6134 else
0d4903b8 6135 {
e0f1be5c
JJ
6136 /* Note: tree_low_cst is intentionally not used here,
6137 we don't care about the upper bits. */
6138 c0 = TREE_INT_CST_LOW (exp);
6139 c0 &= -c0;
6140 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6141 }
6142 break;
6143
65a07688 6144 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6145 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6146 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6147 return MIN (c0, c1);
6148
6149 case MULT_EXPR:
6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152 return c0 * c1;
6153
6154 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6155 case CEIL_DIV_EXPR:
65a07688
RK
6156 if (integer_pow2p (TREE_OPERAND (exp, 1))
6157 && host_integerp (TREE_OPERAND (exp, 1), 1))
6158 {
6159 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6160 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6161 return MAX (1, c0 / c1);
6162 }
6163 break;
0d4903b8
RK
6164
6165 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6166 case SAVE_EXPR:
0d4903b8
RK
6167 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6168
65a07688
RK
6169 case COMPOUND_EXPR:
6170 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6171
0d4903b8
RK
6172 case COND_EXPR:
6173 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6174 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6175 return MIN (c0, c1);
6176
6177 default:
6178 break;
6179 }
6180
6181 return 1;
6182}
818c0c94 6183
d50a16c4
EB
6184/* Similar, except that the alignment requirements of TARGET are
6185 taken into account. Assume it is at least as aligned as its
6186 type, unless it is a COMPONENT_REF in which case the layout of
6187 the structure gives the alignment. */
818c0c94 6188
9ceca302 6189static unsigned HOST_WIDE_INT
d50a16c4 6190highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6191{
d50a16c4 6192 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6193
6194 factor = highest_pow2_factor (exp);
d50a16c4
EB
6195 if (TREE_CODE (target) == COMPONENT_REF)
6196 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6197 else
6198 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6199 return MAX (factor, target_align);
818c0c94 6200}
0d4903b8 6201\f
6de9cd9a
DN
6202/* Expands variable VAR. */
6203
6204void
6205expand_var (tree var)
6206{
6207 if (DECL_EXTERNAL (var))
6208 return;
6209
6210 if (TREE_STATIC (var))
6211 /* If this is an inlined copy of a static local variable,
6212 look up the original decl. */
6213 var = DECL_ORIGIN (var);
6214
6215 if (TREE_STATIC (var)
6216 ? !TREE_ASM_WRITTEN (var)
6217 : !DECL_RTL_SET_P (var))
6218 {
6219 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6220 {
6221 /* Prepare a mem & address for the decl. */
6222 rtx x;
6223
6224 if (TREE_STATIC (var))
6225 abort ();
6226
6227 x = gen_rtx_MEM (DECL_MODE (var),
6228 gen_reg_rtx (Pmode));
6229
6230 set_mem_attributes (x, var, 1);
6231 SET_DECL_RTL (var, x);
6232 }
673fda6b 6233 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6234 /* OK. */;
6235 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6236 expand_decl (var);
6237 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6238 rest_of_decl_compilation (var, NULL, 0, 0);
6239 else if (TREE_CODE (var) == TYPE_DECL
6240 || TREE_CODE (var) == CONST_DECL
6241 || TREE_CODE (var) == FUNCTION_DECL
6242 || TREE_CODE (var) == LABEL_DECL)
6243 /* No expansion needed. */;
6244 else
6245 abort ();
6246 }
6247}
6248
6249/* Expands declarations of variables in list VARS. */
6250
6251static void
6252expand_vars (tree vars)
6253{
6254 for (; vars; vars = TREE_CHAIN (vars))
6255 {
6256 tree var = vars;
6257
6258 if (DECL_EXTERNAL (var))
6259 continue;
6260
6261 expand_var (var);
6262 expand_decl_init (var);
6263 }
6264}
6265
eb698c58
RS
6266/* Subroutine of expand_expr. Expand the two operands of a binary
6267 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6268 The value may be stored in TARGET if TARGET is nonzero. The
6269 MODIFIER argument is as documented by expand_expr. */
6270
6271static void
6272expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6273 enum expand_modifier modifier)
6274{
6275 if (! safe_from_p (target, exp1, 1))
6276 target = 0;
6277 if (operand_equal_p (exp0, exp1, 0))
6278 {
6279 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6280 *op1 = copy_rtx (*op0);
6281 }
6282 else
6283 {
c67e6e14
RS
6284 /* If we need to preserve evaluation order, copy exp0 into its own
6285 temporary variable so that it can't be clobbered by exp1. */
6286 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6287 exp0 = save_expr (exp0);
eb698c58
RS
6288 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6289 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6290 }
6291}
6292
f47e9b4e 6293\f
bbf6f052
RK
6294/* expand_expr: generate code for computing expression EXP.
6295 An rtx for the computed value is returned. The value is never null.
6296 In the case of a void EXP, const0_rtx is returned.
6297
6298 The value may be stored in TARGET if TARGET is nonzero.
6299 TARGET is just a suggestion; callers must assume that
6300 the rtx returned may not be the same as TARGET.
6301
6302 If TARGET is CONST0_RTX, it means that the value will be ignored.
6303
6304 If TMODE is not VOIDmode, it suggests generating the
6305 result in mode TMODE. But this is done only when convenient.
6306 Otherwise, TMODE is ignored and the value generated in its natural mode.
6307 TMODE is just a suggestion; callers must assume that
6308 the rtx returned may not have mode TMODE.
6309
d6a5ac33
RK
6310 Note that TARGET may have neither TMODE nor MODE. In that case, it
6311 probably will not be used.
bbf6f052
RK
6312
6313 If MODIFIER is EXPAND_SUM then when EXP is an addition
6314 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6315 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6316 products as above, or REG or MEM, or constant.
6317 Ordinarily in such cases we would output mul or add instructions
6318 and then return a pseudo reg containing the sum.
6319
6320 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6321 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6322 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6323 This is used for outputting expressions used in initializers.
6324
6325 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6326 with a constant address even if that address is not normally legitimate.
8403445a
AM
6327 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6328
6329 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6330 a call parameter. Such targets require special care as we haven't yet
6331 marked TARGET so that it's safe from being trashed by libcalls. We
6332 don't want to use TARGET for anything but the final result;
6333 Intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6334 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6335
6336 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6337 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6338 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6339 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6340 recursively. */
bbf6f052 6341
6de9cd9a
DN
6342static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6343 enum expand_modifier, rtx *);
6344
bbf6f052 6345rtx
0fab64a3
MM
6346expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6347 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6348{
6349 int rn = -1;
6350 rtx ret, last = NULL;
6351
6352 /* Handle ERROR_MARK before anybody tries to access its type. */
6353 if (TREE_CODE (exp) == ERROR_MARK
6354 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6355 {
6356 ret = CONST0_RTX (tmode);
6357 return ret ? ret : const0_rtx;
6358 }
6359
6360 if (flag_non_call_exceptions)
6361 {
6362 rn = lookup_stmt_eh_region (exp);
6363 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6364 if (rn >= 0)
6365 last = get_last_insn ();
6366 }
6367
6368 /* If this is an expression of some kind and it has an associated line
6369 number, then emit the line number before expanding the expression.
6370
6371 We need to save and restore the file and line information so that
6372 errors discovered during expansion are emitted with the right
6373 information. It would be better of the diagnostic routines
6374 used the file/line information embedded in the tree nodes rather
6375 than globals. */
6376 if (cfun && EXPR_HAS_LOCATION (exp))
6377 {
6378 location_t saved_location = input_location;
6379 input_location = EXPR_LOCATION (exp);
6380 emit_line_note (input_location);
6381
6382 /* Record where the insns produced belong. */
1ea463a2 6383 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6384
6385 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6386
6387 input_location = saved_location;
6388 }
6389 else
6390 {
6391 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6392 }
6393
6394 /* If using non-call exceptions, mark all insns that may trap.
6395 expand_call() will mark CALL_INSNs before we get to this code,
6396 but it doesn't handle libcalls, and these may trap. */
6397 if (rn >= 0)
6398 {
6399 rtx insn;
6400 for (insn = next_real_insn (last); insn;
6401 insn = next_real_insn (insn))
6402 {
6403 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6404 /* If we want exceptions for non-call insns, any
6405 may_trap_p instruction may throw. */
6406 && GET_CODE (PATTERN (insn)) != CLOBBER
6407 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6408 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6409 {
6410 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6411 REG_NOTES (insn));
6412 }
6413 }
6414 }
6415
6416 return ret;
6417}
6418
6419static rtx
6420expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6421 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6422{
b3694847 6423 rtx op0, op1, temp;
bbf6f052 6424 tree type = TREE_TYPE (exp);
8df83eae 6425 int unsignedp;
b3694847
SS
6426 enum machine_mode mode;
6427 enum tree_code code = TREE_CODE (exp);
bbf6f052 6428 optab this_optab;
68557e14
ML
6429 rtx subtarget, original_target;
6430 int ignore;
bbf6f052 6431 tree context;
bc15d0ef
JM
6432 bool reduce_bit_field = false;
6433#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6434 ? reduce_to_bit_field_precision ((expr), \
6435 target, \
6436 type) \
6437 : (expr))
bbf6f052 6438
68557e14 6439 mode = TYPE_MODE (type);
8df83eae 6440 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6441 if (lang_hooks.reduce_bit_field_operations
6442 && TREE_CODE (type) == INTEGER_TYPE
6443 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6444 {
6445 /* An operation in what may be a bit-field type needs the
6446 result to be reduced to the precision of the bit-field type,
6447 which is narrower than that of the type's mode. */
6448 reduce_bit_field = true;
6449 if (modifier == EXPAND_STACK_PARM)
6450 target = 0;
6451 }
8df83eae 6452
68557e14 6453 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6454 subtarget = get_subtarget (target);
68557e14
ML
6455 original_target = target;
6456 ignore = (target == const0_rtx
6457 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6458 || code == CONVERT_EXPR || code == COND_EXPR
6459 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6460 && TREE_CODE (type) == VOID_TYPE));
6461
dd27116b
RK
6462 /* If we are going to ignore this result, we need only do something
6463 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6464 is, short-circuit the most common cases here. Note that we must
6465 not call expand_expr with anything but const0_rtx in case this
6466 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6467
dd27116b
RK
6468 if (ignore)
6469 {
6470 if (! TREE_SIDE_EFFECTS (exp))
6471 return const0_rtx;
6472
14a774a9
RK
6473 /* Ensure we reference a volatile object even if value is ignored, but
6474 don't do this if all we are doing is taking its address. */
dd27116b
RK
6475 if (TREE_THIS_VOLATILE (exp)
6476 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6477 && mode != VOIDmode && mode != BLKmode
6478 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6479 {
37a08a29 6480 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6481 if (MEM_P (temp))
dd27116b
RK
6482 temp = copy_to_reg (temp);
6483 return const0_rtx;
6484 }
6485
14a774a9 6486 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
3a18db48 6487 || code == INDIRECT_REF)
37a08a29
RK
6488 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6489 modifier);
6490
14a774a9 6491 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6492 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6493 {
37a08a29
RK
6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6495 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6496 return const0_rtx;
6497 }
6498 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6499 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6500 /* If the second operand has no side effects, just evaluate
0f41302f 6501 the first. */
37a08a29
RK
6502 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6503 modifier);
14a774a9
RK
6504 else if (code == BIT_FIELD_REF)
6505 {
37a08a29
RK
6506 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6507 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6508 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6509 return const0_rtx;
6510 }
37a08a29 6511
90764a87 6512 target = 0;
dd27116b 6513 }
bbf6f052 6514
e44842fe
RK
6515 /* If will do cse, generate all results into pseudo registers
6516 since 1) that allows cse to find more things
6517 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6518 cannot support. An exception is a CONSTRUCTOR into a multi-word
6519 MEM: that's much more likely to be most efficient into the MEM.
6520 Another is a CALL_EXPR which must return in memory. */
e44842fe 6521
bbf6f052 6522 if (! cse_not_expected && mode != BLKmode && target
f8cfc6aa 6523 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6524 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6525 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6526 target = 0;
bbf6f052 6527
bbf6f052
RK
6528 switch (code)
6529 {
6530 case LABEL_DECL:
b552441b
RS
6531 {
6532 tree function = decl_function_context (exp);
c5c76735 6533
6de9cd9a
DN
6534 temp = label_rtx (exp);
6535 temp = gen_rtx_LABEL_REF (Pmode, temp);
6536
d0977240 6537 if (function != current_function_decl
6de9cd9a
DN
6538 && function != 0)
6539 LABEL_REF_NONLOCAL_P (temp) = 1;
6540
6541 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6542 return temp;
b552441b 6543 }
bbf6f052
RK
6544
6545 case PARM_DECL:
1877be45 6546 if (!DECL_RTL_SET_P (exp))
bbf6f052 6547 {
ddd2d57e 6548 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6549 return CONST0_RTX (mode);
bbf6f052
RK
6550 }
6551
0f41302f 6552 /* ... fall through ... */
d6a5ac33 6553
bbf6f052 6554 case VAR_DECL:
2dca20cd
RS
6555 /* If a static var's type was incomplete when the decl was written,
6556 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6557 if (DECL_SIZE (exp) == 0
6558 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6559 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6560 layout_decl (exp, 0);
921b3427 6561
0f41302f 6562 /* ... fall through ... */
d6a5ac33 6563
2dca20cd 6564 case FUNCTION_DECL:
bbf6f052
RK
6565 case RESULT_DECL:
6566 if (DECL_RTL (exp) == 0)
6567 abort ();
d6a5ac33 6568
e44842fe
RK
6569 /* Ensure variable marked as used even if it doesn't go through
6570 a parser. If it hasn't be used yet, write out an external
6571 definition. */
6572 if (! TREE_USED (exp))
6573 {
6574 assemble_external (exp);
6575 TREE_USED (exp) = 1;
6576 }
6577
dc6d66b3
RK
6578 /* Show we haven't gotten RTL for this yet. */
6579 temp = 0;
6580
bbf6f052
RK
6581 /* Handle variables inherited from containing functions. */
6582 context = decl_function_context (exp);
6583
bbf6f052 6584 if (context != 0 && context != current_function_decl
bbf6f052 6585 /* If var is static, we don't need a static chain to access it. */
3c0cb5de 6586 && ! (MEM_P (DECL_RTL (exp))
bbf6f052
RK
6587 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6588 {
6589 rtx addr;
6590
6591 /* Mark as non-local and addressable. */
81feeecb 6592 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6593 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6594 abort ();
ae2bcd98 6595 lang_hooks.mark_addressable (exp);
3c0cb5de 6596 if (!MEM_P (DECL_RTL (exp)))
bbf6f052
RK
6597 abort ();
6598 addr = XEXP (DECL_RTL (exp), 0);
3c0cb5de 6599 if (MEM_P (addr))
792760b9
RK
6600 addr
6601 = replace_equiv_address (addr,
6602 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6603 else
6604 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6605
792760b9 6606 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6607 }
4af3895e 6608
bbf6f052
RK
6609 /* This is the case of an array whose size is to be determined
6610 from its initializer, while the initializer is still being parsed.
6611 See expand_decl. */
d6a5ac33 6612
3c0cb5de 6613 else if (MEM_P (DECL_RTL (exp))
f8cfc6aa 6614 && REG_P (XEXP (DECL_RTL (exp), 0)))
792760b9 6615 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6616
6617 /* If DECL_RTL is memory, we are in the normal case and either
6618 the address is not valid or it is not a register and -fforce-addr
6619 is specified, get the address into a register. */
6620
3c0cb5de 6621 else if (MEM_P (DECL_RTL (exp))
dc6d66b3
RK
6622 && modifier != EXPAND_CONST_ADDRESS
6623 && modifier != EXPAND_SUM
6624 && modifier != EXPAND_INITIALIZER
6625 && (! memory_address_p (DECL_MODE (exp),
6626 XEXP (DECL_RTL (exp), 0))
6627 || (flag_force_addr
f8cfc6aa 6628 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
0fab64a3
MM
6629 {
6630 if (alt_rtl)
6631 *alt_rtl = DECL_RTL (exp);
6632 temp = replace_equiv_address (DECL_RTL (exp),
6633 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6634 }
1499e0a8 6635
dc6d66b3 6636 /* If we got something, return it. But first, set the alignment
04956a1a 6637 if the address is a register. */
dc6d66b3
RK
6638 if (temp != 0)
6639 {
3c0cb5de 6640 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6641 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6642
6643 return temp;
6644 }
6645
1499e0a8
RK
6646 /* If the mode of DECL_RTL does not match that of the decl, it
6647 must be a promoted value. We return a SUBREG of the wanted mode,
6648 but mark it so that we know that it was already extended. */
6649
f8cfc6aa 6650 if (REG_P (DECL_RTL (exp))
7254c5fa 6651 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6652 {
1499e0a8
RK
6653 /* Get the signedness used for this variable. Ensure we get the
6654 same mode we got when the variable was declared. */
78911e8b 6655 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6656 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6657 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6658 abort ();
6659
ddef6bc7 6660 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6661 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6662 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6663 return temp;
6664 }
6665
bbf6f052
RK
6666 return DECL_RTL (exp);
6667
6668 case INTEGER_CST:
d8a50944 6669 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6670 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6671
d8a50944
RH
6672 /* ??? If overflow is set, fold will have done an incomplete job,
6673 which can result in (plus xx (const_int 0)), which can get
6674 simplified by validate_replace_rtx during virtual register
6675 instantiation, which can result in unrecognizable insns.
6676 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6677 if (TREE_CONSTANT_OVERFLOW (exp)
6678 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6679 temp = force_reg (mode, temp);
6680
6681 return temp;
6682
d744e06e
AH
6683 case VECTOR_CST:
6684 return const_vector_from_tree (exp);
6685
bbf6f052 6686 case CONST_DECL:
8403445a 6687 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6688
6689 case REAL_CST:
6690 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6691 which will be turned into memory by reload if necessary.
6692
bbf6f052
RK
6693 We used to force a register so that loop.c could see it. But
6694 this does not allow gen_* patterns to perform optimizations with
6695 the constants. It also produces two insns in cases like "x = 1.0;".
6696 On most machines, floating-point constants are not permitted in
6697 many insns, so we'd end up copying it to a register in any case.
6698
6699 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6700 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6701 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6702
6703 case COMPLEX_CST:
9ad58e09
RS
6704 /* Handle evaluating a complex constant in a CONCAT target. */
6705 if (original_target && GET_CODE (original_target) == CONCAT)
6706 {
6707 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6708 rtx rtarg, itarg;
6709
6710 rtarg = XEXP (original_target, 0);
6711 itarg = XEXP (original_target, 1);
6712
6713 /* Move the real and imaginary parts separately. */
6714 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6715 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6716
6717 if (op0 != rtarg)
6718 emit_move_insn (rtarg, op0);
6719 if (op1 != itarg)
6720 emit_move_insn (itarg, op1);
6721
6722 return original_target;
6723 }
6724
71c0e7fc 6725 /* ... fall through ... */
9ad58e09 6726
bbf6f052 6727 case STRING_CST:
afc6aaab 6728 temp = output_constant_def (exp, 1);
bbf6f052 6729
afc6aaab 6730 /* temp contains a constant address.
bbf6f052
RK
6731 On RISC machines where a constant address isn't valid,
6732 make some insns to get that address into a register. */
afc6aaab 6733 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6734 && modifier != EXPAND_INITIALIZER
6735 && modifier != EXPAND_SUM
afc6aaab
ZW
6736 && (! memory_address_p (mode, XEXP (temp, 0))
6737 || flag_force_addr))
6738 return replace_equiv_address (temp,
6739 copy_rtx (XEXP (temp, 0)));
6740 return temp;
bbf6f052
RK
6741
6742 case SAVE_EXPR:
82c82743
RH
6743 {
6744 tree val = TREE_OPERAND (exp, 0);
6745 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6746
82c82743
RH
6747 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6748 {
6749 /* We can indeed still hit this case, typically via builtin
6750 expanders calling save_expr immediately before expanding
6751 something. Assume this means that we only have to deal
6752 with non-BLKmode values. */
6753 if (GET_MODE (ret) == BLKmode)
6754 abort ();
1499e0a8 6755
82c82743
RH
6756 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6757 DECL_ARTIFICIAL (val) = 1;
6758 TREE_OPERAND (exp, 0) = val;
1499e0a8 6759
82c82743
RH
6760 if (!CONSTANT_P (ret))
6761 ret = copy_to_reg (ret);
6762 SET_DECL_RTL (val, ret);
6763 }
1499e0a8 6764
82c82743
RH
6765 return ret;
6766 }
bbf6f052 6767
679163cf
MS
6768 case UNSAVE_EXPR:
6769 {
6770 rtx temp;
6771 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a 6772 TREE_OPERAND (exp, 0)
ae2bcd98 6773 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
679163cf
MS
6774 return temp;
6775 }
6776
70e6ca43
APB
6777 case GOTO_EXPR:
6778 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6779 expand_goto (TREE_OPERAND (exp, 0));
6780 else
6781 expand_computed_goto (TREE_OPERAND (exp, 0));
6782 return const0_rtx;
6783
6af8eb57
SB
6784 /* These are lowered during gimplification, so we should never ever
6785 see them here. */
6786 case LOOP_EXPR:
bbf6f052 6787 case EXIT_EXPR:
6af8eb57 6788 abort ();
bbf6f052 6789
f42e28dd
APB
6790 case LABELED_BLOCK_EXPR:
6791 if (LABELED_BLOCK_BODY (exp))
4dfa0342 6792 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
30f7a378 6793 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6794 do_pending_stack_adjust ();
f42e28dd
APB
6795 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6796 return const0_rtx;
6797
6798 case EXIT_BLOCK_EXPR:
6799 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6800 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6801 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6802 return const0_rtx;
6803
bbf6f052
RK
6804 case BIND_EXPR:
6805 {
6de9cd9a
DN
6806 tree block = BIND_EXPR_BLOCK (exp);
6807 int mark_ends;
bbf6f052 6808
4dfa0342
RH
6809 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6810 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6811 mark_ends = (block != NULL_TREE);
6812 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
bbf6f052 6813
6de9cd9a
DN
6814 /* If VARS have not yet been expanded, expand them now. */
6815 expand_vars (BIND_EXPR_VARS (exp));
6816
6817 /* TARGET was clobbered early in this function. The correct
6818 indicator or whether or not we need the value of this
6819 expression is the IGNORE variable. */
6820 temp = expand_expr (BIND_EXPR_BODY (exp),
6821 ignore ? const0_rtx : target,
6822 tmode, modifier);
bbf6f052 6823
6de9cd9a 6824 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
bbf6f052
RK
6825
6826 return temp;
6827 }
6828
bbf6f052 6829 case CONSTRUCTOR:
dd27116b
RK
6830 /* If we don't need the result, just ensure we evaluate any
6831 subexpressions. */
6832 if (ignore)
6833 {
6834 tree elt;
37a08a29 6835
dd27116b 6836 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6837 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6838
dd27116b
RK
6839 return const0_rtx;
6840 }
3207b172 6841
4af3895e
JVA
6842 /* All elts simple constants => refer to a constant in memory. But
6843 if this is a non-BLKmode mode, let it store a field at a time
6844 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6845 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6846 store directly into the target unless the type is large enough
6847 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6848 all operands are constant, put it in memory as well.
6849
6850 FIXME: Avoid trying to fill vector constructors piece-meal.
6851 Output them with output_constant_def below unless we're sure
6852 they're zeros. This should go away when vector initializers
6853 are treated like VECTOR_CST instead of arrays.
6854 */
dd27116b 6855 else if ((TREE_STATIC (exp)
3207b172 6856 && ((mode == BLKmode
e5e809f4 6857 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6858 || TREE_ADDRESSABLE (exp)
19caa751 6859 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6860 && (! MOVE_BY_PIECES_P
19caa751
RK
6861 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6862 TYPE_ALIGN (type)))
6de9cd9a 6863 && ! mostly_zeros_p (exp))))
f59700f9
RK
6864 || ((modifier == EXPAND_INITIALIZER
6865 || modifier == EXPAND_CONST_ADDRESS)
6866 && TREE_CONSTANT (exp)))
bbf6f052 6867 {
bd7cf17e 6868 rtx constructor = output_constant_def (exp, 1);
19caa751 6869
b552441b
RS
6870 if (modifier != EXPAND_CONST_ADDRESS
6871 && modifier != EXPAND_INITIALIZER
792760b9
RK
6872 && modifier != EXPAND_SUM)
6873 constructor = validize_mem (constructor);
6874
bbf6f052
RK
6875 return constructor;
6876 }
bbf6f052
RK
6877 else
6878 {
e9ac02a6
JW
6879 /* Handle calls that pass values in multiple non-contiguous
6880 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6881 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6882 || GET_CODE (target) == PARALLEL
6883 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6884 target
6885 = assign_temp (build_qualified_type (type,
6886 (TYPE_QUALS (type)
6887 | (TREE_READONLY (exp)
6888 * TYPE_QUAL_CONST))),
c24ae149 6889 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6890
dbb5c281 6891 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6892 return target;
6893 }
6894
6895 case INDIRECT_REF:
6896 {
6897 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 6898
6de9cd9a
DN
6899 if (modifier != EXPAND_WRITE)
6900 {
6901 tree t;
6902
6903 t = fold_read_from_constant_string (exp);
6904 if (t)
6905 return expand_expr (t, target, tmode, modifier);
6906 }
bbf6f052 6907
405f0da6
JW
6908 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6909 op0 = memory_address (mode, op0);
38a448ca 6910 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6911 set_mem_attributes (temp, exp, 0);
1125706f 6912
14a774a9
RK
6913 /* If we are writing to this object and its type is a record with
6914 readonly fields, we must mark it as readonly so it will
6915 conflict with readonly references to those fields. */
37a08a29 6916 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6917 RTX_UNCHANGING_P (temp) = 1;
6918
8c8a8e34
JW
6919 return temp;
6920 }
bbf6f052
RK
6921
6922 case ARRAY_REF:
6de9cd9a
DN
6923
6924#ifdef ENABLE_CHECKING
742920c7
RK
6925 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6926 abort ();
6de9cd9a 6927#endif
bbf6f052 6928
bbf6f052 6929 {
742920c7 6930 tree array = TREE_OPERAND (exp, 0);
44de5aeb 6931 tree low_bound = array_ref_low_bound (exp);
fed3cef0 6932 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6933 HOST_WIDE_INT i;
b50d17a1 6934
d4c89139
PB
6935 /* Optimize the special-case of a zero lower bound.
6936
6937 We convert the low_bound to sizetype to avoid some problems
6938 with constant folding. (E.g. suppose the lower bound is 1,
6939 and its mode is QI. Without the conversion, (ARRAY
6940 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6941 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6942
742920c7 6943 if (! integer_zerop (low_bound))
fed3cef0 6944 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6945
742920c7 6946 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6947 This is not done in fold so it won't happen inside &.
6948 Don't fold if this is for wide characters since it's too
6949 difficult to do correctly and this is a very rare case. */
742920c7 6950
017e1b43
RH
6951 if (modifier != EXPAND_CONST_ADDRESS
6952 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
6953 && modifier != EXPAND_MEMORY)
6954 {
6955 tree t = fold_read_from_constant_string (exp);
6956
6957 if (t)
6958 return expand_expr (t, target, tmode, modifier);
6959 }
bbf6f052 6960
742920c7
RK
6961 /* If this is a constant index into a constant array,
6962 just get the value from the array. Handle both the cases when
6963 we have an explicit constructor and when our operand is a variable
6964 that was declared const. */
4af3895e 6965
017e1b43
RH
6966 if (modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_MEMORY
6969 && TREE_CODE (array) == CONSTRUCTOR
6970 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6971 && TREE_CODE (index) == INTEGER_CST
3a94c984 6972 && 0 > compare_tree_int (index,
05bccae2
RK
6973 list_length (CONSTRUCTOR_ELTS
6974 (TREE_OPERAND (exp, 0)))))
742920c7 6975 {
05bccae2
RK
6976 tree elem;
6977
6978 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6979 i = TREE_INT_CST_LOW (index);
6980 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6981 ;
6982
6983 if (elem)
37a08a29
RK
6984 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6985 modifier);
742920c7 6986 }
3a94c984 6987
742920c7 6988 else if (optimize >= 1
cb5fa0f8
RK
6989 && modifier != EXPAND_CONST_ADDRESS
6990 && modifier != EXPAND_INITIALIZER
017e1b43 6991 && modifier != EXPAND_MEMORY
742920c7
RK
6992 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6993 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
6994 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6995 && targetm.binds_local_p (array))
742920c7 6996 {
08293add 6997 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6998 {
6999 tree init = DECL_INITIAL (array);
7000
742920c7
RK
7001 if (TREE_CODE (init) == CONSTRUCTOR)
7002 {
665f2503 7003 tree elem;
742920c7 7004
05bccae2 7005 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
7006 (elem
7007 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
7008 elem = TREE_CHAIN (elem))
7009 ;
7010
c54b0a5e 7011 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7012 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7013 tmode, modifier);
742920c7
RK
7014 }
7015 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
7016 && 0 > compare_tree_int (index,
7017 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
7018 {
7019 tree type = TREE_TYPE (TREE_TYPE (init));
7020 enum machine_mode mode = TYPE_MODE (type);
7021
7022 if (GET_MODE_CLASS (mode) == MODE_INT
7023 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7024 return gen_int_mode (TREE_STRING_POINTER (init)
7025 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7026 }
742920c7
RK
7027 }
7028 }
7029 }
afc6aaab 7030 goto normal_inner_ref;
bbf6f052
RK
7031
7032 case COMPONENT_REF:
4af3895e 7033 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7034 appropriate field if it is present. */
7035 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
7036 {
7037 tree elt;
7038
7039 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7040 elt = TREE_CHAIN (elt))
86b5812c
RK
7041 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7042 /* We can normally use the value of the field in the
7043 CONSTRUCTOR. However, if this is a bitfield in
7044 an integral mode that we can fit in a HOST_WIDE_INT,
7045 we must mask only the number of bits in the bitfield,
7046 since this is done implicitly by the constructor. If
7047 the bitfield does not meet either of those conditions,
7048 we can't do this optimization. */
7049 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7050 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7051 == MODE_INT)
7052 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7053 <= HOST_BITS_PER_WIDE_INT))))
7054 {
8403445a
AM
7055 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7056 && modifier == EXPAND_STACK_PARM)
7057 target = 0;
3a94c984 7058 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
7059 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7060 {
9df2c88c
RK
7061 HOST_WIDE_INT bitsize
7062 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
7063 enum machine_mode imode
7064 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 7065
8df83eae 7066 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
86b5812c
RK
7067 {
7068 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7069 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7070 }
7071 else
7072 {
7073 tree count
e5e809f4
JL
7074 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7075 0);
86b5812c
RK
7076
7077 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7078 target, 0);
7079 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7080 target, 0);
7081 }
7082 }
7083
7084 return op0;
7085 }
4af3895e 7086 }
afc6aaab 7087 goto normal_inner_ref;
4af3895e 7088
afc6aaab
ZW
7089 case BIT_FIELD_REF:
7090 case ARRAY_RANGE_REF:
7091 normal_inner_ref:
bbf6f052
RK
7092 {
7093 enum machine_mode mode1;
770ae6cc 7094 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7095 tree offset;
bbf6f052 7096 int volatilep = 0;
839c4796 7097 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7098 &mode1, &unsignedp, &volatilep);
f47e9b4e 7099 rtx orig_op0;
bbf6f052 7100
e7f3c83f
RK
7101 /* If we got back the original object, something is wrong. Perhaps
7102 we are evaluating an expression too early. In any event, don't
7103 infinitely recurse. */
7104 if (tem == exp)
7105 abort ();
7106
3d27140a 7107 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7108 computation, since it will need a temporary and TARGET is known
7109 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7110
f47e9b4e
RK
7111 orig_op0 = op0
7112 = expand_expr (tem,
7113 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7114 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7115 != INTEGER_CST)
8403445a 7116 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7117 ? target : NULL_RTX),
7118 VOIDmode,
7119 (modifier == EXPAND_INITIALIZER
8403445a
AM
7120 || modifier == EXPAND_CONST_ADDRESS
7121 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7122 ? modifier : EXPAND_NORMAL);
bbf6f052 7123
8c8a8e34 7124 /* If this is a constant, put it into a register if it is a
14a774a9 7125 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7126 if (CONSTANT_P (op0))
7127 {
7128 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7129 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7130 && offset == 0)
8c8a8e34
JW
7131 op0 = force_reg (mode, op0);
7132 else
7133 op0 = validize_mem (force_const_mem (mode, op0));
7134 }
7135
8d2e5f72
RK
7136 /* Otherwise, if this object not in memory and we either have an
7137 offset or a BLKmode result, put it there. This case can't occur in
7138 C, but can in Ada if we have unchecked conversion of an expression
7139 from a scalar type to an array or record type or for an
7140 ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 7141 else if (!MEM_P (op0)
8d2e5f72
RK
7142 && (offset != 0
7143 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7144 {
82c82743
RH
7145 tree nt = build_qualified_type (TREE_TYPE (tem),
7146 (TYPE_QUALS (TREE_TYPE (tem))
7147 | TYPE_QUAL_CONST));
7148 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7149
82c82743
RH
7150 emit_move_insn (memloc, op0);
7151 op0 = memloc;
8d2e5f72
RK
7152 }
7153
7bb0943f
RS
7154 if (offset != 0)
7155 {
8403445a
AM
7156 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7157 EXPAND_SUM);
7bb0943f 7158
3c0cb5de 7159 if (!MEM_P (op0))
7bb0943f 7160 abort ();
2d48c13d 7161
2d48c13d 7162#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7163 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7164 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7165#else
7166 if (GET_MODE (offset_rtx) != ptr_mode)
7167 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7168#endif
7169
e82407b5
EB
7170 if (GET_MODE (op0) == BLKmode
7171 /* A constant address in OP0 can have VOIDmode, we must
7172 not try to call force_reg in that case. */
efd07ca7 7173 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7174 && bitsize != 0
3a94c984 7175 && (bitpos % bitsize) == 0
89752202 7176 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7177 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7178 {
e3c8ea67 7179 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7180 bitpos = 0;
7181 }
7182
0d4903b8
RK
7183 op0 = offset_address (op0, offset_rtx,
7184 highest_pow2_factor (offset));
7bb0943f
RS
7185 }
7186
1ce7f3c2
RK
7187 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7188 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 7189 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
7190 && is_aligning_offset (offset, tem))
7191 set_mem_align (op0, BIGGEST_ALIGNMENT);
7192
bbf6f052 7193 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 7194 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 7195 {
f47e9b4e
RK
7196 if (op0 == orig_op0)
7197 op0 = copy_rtx (op0);
7198
bbf6f052
RK
7199 MEM_VOLATILE_P (op0) = 1;
7200 }
7201
010f87c4
JJ
7202 /* The following code doesn't handle CONCAT.
7203 Assume only bitpos == 0 can be used for CONCAT, due to
7204 one element arrays having the same mode as its element. */
7205 if (GET_CODE (op0) == CONCAT)
7206 {
7207 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7208 abort ();
7209 return op0;
7210 }
7211
ccc98036
RS
7212 /* In cases where an aligned union has an unaligned object
7213 as a field, we might be extracting a BLKmode value from
7214 an integer-mode (e.g., SImode) object. Handle this case
7215 by doing the extract into an object as wide as the field
7216 (which we know to be the width of a basic mode), then
cb5fa0f8 7217 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7218 if (mode1 == VOIDmode
f8cfc6aa 7219 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7220 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7221 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7223 && modifier != EXPAND_CONST_ADDRESS
7224 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7225 /* If the field isn't aligned enough to fetch as a memref,
7226 fetch it as a bit field. */
7227 || (mode1 != BLKmode
9e5f281f 7228 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 7229 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 7230 || (MEM_P (op0)
e82407b5
EB
7231 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7232 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7233 && ((modifier == EXPAND_CONST_ADDRESS
7234 || modifier == EXPAND_INITIALIZER)
7235 ? STRICT_ALIGNMENT
7236 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7237 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7238 /* If the type and the field are a constant size and the
7239 size of the type isn't the same size as the bitfield,
7240 we must use bitfield operations. */
7241 || (bitsize >= 0
7242 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7243 == INTEGER_CST)
7244 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7245 bitsize)))
bbf6f052 7246 {
bbf6f052
RK
7247 enum machine_mode ext_mode = mode;
7248
14a774a9 7249 if (ext_mode == BLKmode
3c0cb5de
JQ
7250 && ! (target != 0 && MEM_P (op0)
7251 && MEM_P (target)
14a774a9 7252 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7253 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7254
7255 if (ext_mode == BLKmode)
a281e72d 7256 {
7a06d606
RK
7257 if (target == 0)
7258 target = assign_temp (type, 0, 1, 1);
7259
7260 if (bitsize == 0)
7261 return target;
7262
a281e72d
RK
7263 /* In this case, BITPOS must start at a byte boundary and
7264 TARGET, if specified, must be a MEM. */
3c0cb5de
JQ
7265 if (!MEM_P (op0)
7266 || (target != 0 && !MEM_P (target))
a281e72d
RK
7267 || bitpos % BITS_PER_UNIT != 0)
7268 abort ();
7269
7a06d606
RK
7270 emit_block_move (target,
7271 adjust_address (op0, VOIDmode,
7272 bitpos / BITS_PER_UNIT),
a06ef755 7273 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7274 / BITS_PER_UNIT),
8403445a
AM
7275 (modifier == EXPAND_STACK_PARM
7276 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7277
a281e72d
RK
7278 return target;
7279 }
bbf6f052 7280
dc6d66b3
RK
7281 op0 = validize_mem (op0);
7282
3c0cb5de 7283 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 7284 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7285
8403445a
AM
7286 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7287 (modifier == EXPAND_STACK_PARM
7288 ? NULL_RTX : target),
7289 ext_mode, ext_mode,
bbf6f052 7290 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7291
7292 /* If the result is a record type and BITSIZE is narrower than
7293 the mode of OP0, an integral mode, and this is a big endian
7294 machine, we must put the field into the high-order bits. */
7295 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7296 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7297 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7298 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7299 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7300 - bitsize),
7301 op0, 1);
7302
daae8185
RK
7303 /* If the result type is BLKmode, store the data into a temporary
7304 of the appropriate type, but with the mode corresponding to the
7305 mode for the data we have (op0's mode). It's tempting to make
7306 this a constant type, since we know it's only being stored once,
7307 but that can cause problems if we are taking the address of this
7308 COMPONENT_REF because the MEM of any reference via that address
7309 will have flags corresponding to the type, which will not
7310 necessarily be constant. */
bbf6f052
RK
7311 if (mode == BLKmode)
7312 {
daae8185
RK
7313 rtx new
7314 = assign_stack_temp_for_type
7315 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7316
7317 emit_move_insn (new, op0);
7318 op0 = copy_rtx (new);
7319 PUT_MODE (op0, BLKmode);
c3d32120 7320 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7321 }
7322
7323 return op0;
7324 }
7325
05019f83
RK
7326 /* If the result is BLKmode, use that to access the object
7327 now as well. */
7328 if (mode == BLKmode)
7329 mode1 = BLKmode;
7330
bbf6f052
RK
7331 /* Get a reference to just this component. */
7332 if (modifier == EXPAND_CONST_ADDRESS
7333 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7334 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7335 else
f4ef873c 7336 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7337
f47e9b4e
RK
7338 if (op0 == orig_op0)
7339 op0 = copy_rtx (op0);
7340
3bdf5ad1 7341 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7342 if (REG_P (XEXP (op0, 0)))
a06ef755 7343 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7344
bbf6f052 7345 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7346 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7347 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7348 || modifier == EXPAND_INITIALIZER)
bbf6f052 7349 return op0;
0d15e60c 7350 else if (target == 0)
bbf6f052 7351 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7352
bbf6f052
RK
7353 convert_move (target, op0, unsignedp);
7354 return target;
7355 }
7356
0f59171d
RH
7357 case OBJ_TYPE_REF:
7358 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7359
bbf6f052
RK
7360 case CALL_EXPR:
7361 /* Check for a built-in function. */
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7363 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7364 == FUNCTION_DECL)
bbf6f052 7365 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7366 {
c70eaeaf
KG
7367 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7368 == BUILT_IN_FRONTEND)
673fda6b
SB
7369 return lang_hooks.expand_expr (exp, original_target,
7370 tmode, modifier,
7371 alt_rtl);
c70eaeaf
KG
7372 else
7373 return expand_builtin (exp, target, subtarget, tmode, ignore);
7374 }
d6a5ac33 7375
8129842c 7376 return expand_call (exp, target, ignore);
bbf6f052
RK
7377
7378 case NON_LVALUE_EXPR:
7379 case NOP_EXPR:
7380 case CONVERT_EXPR:
4a53008b 7381 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7382 return const0_rtx;
4a53008b 7383
bbf6f052
RK
7384 if (TREE_CODE (type) == UNION_TYPE)
7385 {
7386 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7387
c3d32120
RK
7388 /* If both input and output are BLKmode, this conversion isn't doing
7389 anything except possibly changing memory attribute. */
7390 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7391 {
7392 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7393 modifier);
7394
7395 result = copy_rtx (result);
7396 set_mem_attributes (result, exp, 0);
7397 return result;
7398 }
14a774a9 7399
bbf6f052 7400 if (target == 0)
cf7cb67e
JH
7401 {
7402 if (TYPE_MODE (type) != BLKmode)
7403 target = gen_reg_rtx (TYPE_MODE (type));
7404 else
7405 target = assign_temp (type, 0, 1, 1);
7406 }
d6a5ac33 7407
3c0cb5de 7408 if (MEM_P (target))
bbf6f052
RK
7409 /* Store data into beginning of memory target. */
7410 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7411 adjust_address (target, TYPE_MODE (valtype), 0),
7412 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7413
f8cfc6aa 7414 else if (REG_P (target))
bbf6f052 7415 /* Store this field into a union of the proper type. */
14a774a9
RK
7416 store_field (target,
7417 MIN ((int_size_in_bytes (TREE_TYPE
7418 (TREE_OPERAND (exp, 0)))
7419 * BITS_PER_UNIT),
8752c357 7420 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7421 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7422 VOIDmode, 0, type, 0);
bbf6f052
RK
7423 else
7424 abort ();
7425
7426 /* Return the entire union. */
7427 return target;
7428 }
d6a5ac33 7429
7f62854a
RK
7430 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7431 {
7432 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7433 modifier);
7f62854a
RK
7434
7435 /* If the signedness of the conversion differs and OP0 is
7436 a promoted SUBREG, clear that indication since we now
7437 have to do the proper extension. */
8df83eae 7438 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7439 && GET_CODE (op0) == SUBREG)
7440 SUBREG_PROMOTED_VAR_P (op0) = 0;
7441
bc15d0ef 7442 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7443 }
7444
fdf473ae 7445 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
bc15d0ef 7446 op0 = REDUCE_BIT_FIELD (op0);
12342f90
RS
7447 if (GET_MODE (op0) == mode)
7448 return op0;
12342f90 7449
d6a5ac33
RK
7450 /* If OP0 is a constant, just convert it into the proper mode. */
7451 if (CONSTANT_P (op0))
fdf473ae
RH
7452 {
7453 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7454 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7455
0fb7aeda 7456 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7457 return simplify_gen_subreg (mode, op0, inner_mode,
7458 subreg_lowpart_offset (mode,
7459 inner_mode));
7460 else
7461 return convert_modes (mode, inner_mode, op0,
8df83eae 7462 TYPE_UNSIGNED (inner_type));
fdf473ae 7463 }
12342f90 7464
26fcb35a 7465 if (modifier == EXPAND_INITIALIZER)
38a448ca 7466 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7467
bbf6f052 7468 if (target == 0)
d6a5ac33
RK
7469 return
7470 convert_to_mode (mode, op0,
8df83eae 7471 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7472 else
d6a5ac33 7473 convert_move (target, op0,
8df83eae 7474 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7475 return target;
7476
ed239f5a 7477 case VIEW_CONVERT_EXPR:
37a08a29 7478 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7479
7480 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7481 Otherwise, if neither mode is BLKmode and both are integral and within
7482 a word, we can use gen_lowpart. If neither is true, make sure the
7483 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7484 if (TYPE_MODE (type) == GET_MODE (op0))
7485 ;
7486 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7487 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7488 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7489 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7490 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7491 op0 = gen_lowpart (TYPE_MODE (type), op0);
3c0cb5de 7492 else if (!MEM_P (op0))
ed239f5a 7493 {
c11c10d8
RK
7494 /* If the operand is not a MEM, force it into memory. Since we
7495 are going to be be changing the mode of the MEM, don't call
7496 force_const_mem for constants because we don't allow pool
7497 constants to change mode. */
ed239f5a 7498 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7499
c11c10d8
RK
7500 if (TREE_ADDRESSABLE (exp))
7501 abort ();
ed239f5a 7502
c11c10d8
RK
7503 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7504 target
7505 = assign_stack_temp_for_type
7506 (TYPE_MODE (inner_type),
7507 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7508
c11c10d8
RK
7509 emit_move_insn (target, op0);
7510 op0 = target;
ed239f5a
RK
7511 }
7512
c11c10d8
RK
7513 /* At this point, OP0 is in the correct mode. If the output type is such
7514 that the operand is known to be aligned, indicate that it is.
7515 Otherwise, we need only be concerned about alignment for non-BLKmode
7516 results. */
3c0cb5de 7517 if (MEM_P (op0))
ed239f5a
RK
7518 {
7519 op0 = copy_rtx (op0);
7520
ed239f5a
RK
7521 if (TYPE_ALIGN_OK (type))
7522 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7523 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7524 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7525 {
7526 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7527 HOST_WIDE_INT temp_size
7528 = MAX (int_size_in_bytes (inner_type),
7529 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7530 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7531 temp_size, 0, type);
c4e59f51 7532 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7533
c11c10d8
RK
7534 if (TREE_ADDRESSABLE (exp))
7535 abort ();
7536
ed239f5a
RK
7537 if (GET_MODE (op0) == BLKmode)
7538 emit_block_move (new_with_op0_mode, op0,
44bb111a 7539 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7540 (modifier == EXPAND_STACK_PARM
7541 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7542 else
7543 emit_move_insn (new_with_op0_mode, op0);
7544
7545 op0 = new;
7546 }
0fb7aeda 7547
c4e59f51 7548 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7549 }
7550
7551 return op0;
7552
bbf6f052 7553 case PLUS_EXPR:
91ce572a 7554 this_optab = ! unsignedp && flag_trapv
a9785c70 7555 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7556 ? addv_optab : add_optab;
bbf6f052 7557
4dfa0342 7558 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7559 something else, make sure we add the register to the constant and
7560 then to the other thing. This case can occur during strength
7561 reduction and doing it this way will produce better code if the
7562 frame pointer or argument pointer is eliminated.
7563
7564 fold-const.c will ensure that the constant is always in the inner
7565 PLUS_EXPR, so the only case we need to do anything about is if
7566 sp, ap, or fp is our second argument, in which case we must swap
7567 the innermost first argument and our second argument. */
7568
7569 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7570 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7571 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7572 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7573 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7574 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7575 {
7576 tree t = TREE_OPERAND (exp, 1);
7577
7578 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7579 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7580 }
7581
88f63c77 7582 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7583 something, we might be forming a constant. So try to use
7584 plus_constant. If it produces a sum and we can't accept it,
7585 use force_operand. This allows P = &ARR[const] to generate
7586 efficient code on machines where a SYMBOL_REF is not a valid
7587 address.
7588
7589 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7590 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7591 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7592 {
8403445a
AM
7593 if (modifier == EXPAND_STACK_PARM)
7594 target = 0;
c980ac49
RS
7595 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7596 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7597 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7598 {
cbbc503e
JL
7599 rtx constant_part;
7600
c980ac49
RS
7601 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7602 EXPAND_SUM);
cbbc503e
JL
7603 /* Use immed_double_const to ensure that the constant is
7604 truncated according to the mode of OP1, then sign extended
7605 to a HOST_WIDE_INT. Using the constant directly can result
7606 in non-canonical RTL in a 64x32 cross compile. */
7607 constant_part
7608 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7609 (HOST_WIDE_INT) 0,
a5efcd63 7610 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7611 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7612 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7613 op1 = force_operand (op1, target);
bc15d0ef 7614 return REDUCE_BIT_FIELD (op1);
c980ac49 7615 }
bbf6f052 7616
c980ac49
RS
7617 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7618 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7619 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7620 {
cbbc503e
JL
7621 rtx constant_part;
7622
c980ac49 7623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7624 (modifier == EXPAND_INITIALIZER
7625 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7626 if (! CONSTANT_P (op0))
7627 {
7628 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7629 VOIDmode, modifier);
f0e9957a
RS
7630 /* Return a PLUS if modifier says it's OK. */
7631 if (modifier == EXPAND_SUM
7632 || modifier == EXPAND_INITIALIZER)
7633 return simplify_gen_binary (PLUS, mode, op0, op1);
7634 goto binop2;
c980ac49 7635 }
cbbc503e
JL
7636 /* Use immed_double_const to ensure that the constant is
7637 truncated according to the mode of OP1, then sign extended
7638 to a HOST_WIDE_INT. Using the constant directly can result
7639 in non-canonical RTL in a 64x32 cross compile. */
7640 constant_part
7641 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7642 (HOST_WIDE_INT) 0,
2a94e396 7643 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7644 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7645 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7646 op0 = force_operand (op0, target);
bc15d0ef 7647 return REDUCE_BIT_FIELD (op0);
c980ac49 7648 }
bbf6f052
RK
7649 }
7650
7651 /* No sense saving up arithmetic to be done
7652 if it's all in the wrong mode to form part of an address.
7653 And force_operand won't know whether to sign-extend or
7654 zero-extend. */
7655 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7656 || mode != ptr_mode)
4ef7870a 7657 {
eb698c58
RS
7658 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7659 subtarget, &op0, &op1, 0);
6e7727eb
EB
7660 if (op0 == const0_rtx)
7661 return op1;
7662 if (op1 == const0_rtx)
7663 return op0;
4ef7870a
EB
7664 goto binop2;
7665 }
bbf6f052 7666
eb698c58
RS
7667 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7668 subtarget, &op0, &op1, modifier);
bc15d0ef 7669 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7670
7671 case MINUS_EXPR:
ea87523e
RK
7672 /* For initializers, we are allowed to return a MINUS of two
7673 symbolic constants. Here we handle all cases when both operands
7674 are constant. */
bbf6f052
RK
7675 /* Handle difference of two symbolic constants,
7676 for the sake of an initializer. */
7677 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7678 && really_constant_p (TREE_OPERAND (exp, 0))
7679 && really_constant_p (TREE_OPERAND (exp, 1)))
7680 {
eb698c58
RS
7681 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7682 NULL_RTX, &op0, &op1, modifier);
ea87523e 7683
ea87523e
RK
7684 /* If the last operand is a CONST_INT, use plus_constant of
7685 the negated constant. Else make the MINUS. */
7686 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7687 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7688 else
bc15d0ef 7689 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7690 }
ae431183 7691
91ce572a
CC
7692 this_optab = ! unsignedp && flag_trapv
7693 && (GET_MODE_CLASS(mode) == MODE_INT)
7694 ? subv_optab : sub_optab;
1717e19e
UW
7695
7696 /* No sense saving up arithmetic to be done
7697 if it's all in the wrong mode to form part of an address.
7698 And force_operand won't know whether to sign-extend or
7699 zero-extend. */
7700 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7701 || mode != ptr_mode)
7702 goto binop;
7703
eb698c58
RS
7704 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7705 subtarget, &op0, &op1, modifier);
1717e19e
UW
7706
7707 /* Convert A - const to A + (-const). */
7708 if (GET_CODE (op1) == CONST_INT)
7709 {
7710 op1 = negate_rtx (mode, op1);
bc15d0ef 7711 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7712 }
7713
7714 goto binop2;
bbf6f052
RK
7715
7716 case MULT_EXPR:
bbf6f052
RK
7717 /* If first operand is constant, swap them.
7718 Thus the following special case checks need only
7719 check the second operand. */
7720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7721 {
b3694847 7722 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7723 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7724 TREE_OPERAND (exp, 1) = t1;
7725 }
7726
7727 /* Attempt to return something suitable for generating an
7728 indexed address, for machines that support that. */
7729
88f63c77 7730 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7731 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7732 {
48a5f2fa
DJ
7733 tree exp1 = TREE_OPERAND (exp, 1);
7734
921b3427
RK
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7736 EXPAND_SUM);
bbf6f052 7737
f8cfc6aa 7738 if (!REG_P (op0))
906c4e36 7739 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7740 if (!REG_P (op0))
bbf6f052
RK
7741 op0 = copy_to_mode_reg (mode, op0);
7742
bc15d0ef 7743 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7744 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7745 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7746 }
7747
8403445a
AM
7748 if (modifier == EXPAND_STACK_PARM)
7749 target = 0;
7750
bbf6f052
RK
7751 /* Check for multiplying things that have been extended
7752 from a narrower type. If this machine supports multiplying
7753 in that narrower type with a result in the desired type,
7754 do it that way, and avoid the explicit type-conversion. */
7755 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7756 && TREE_CODE (type) == INTEGER_TYPE
7757 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7758 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7759 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7760 && int_fits_type_p (TREE_OPERAND (exp, 1),
7761 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7762 /* Don't use a widening multiply if a shift will do. */
7763 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7764 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7765 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7766 ||
7767 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7768 && (TYPE_PRECISION (TREE_TYPE
7769 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7770 == TYPE_PRECISION (TREE_TYPE
7771 (TREE_OPERAND
7772 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7773 /* If both operands are extended, they must either both
7774 be zero-extended or both be sign-extended. */
8df83eae
RK
7775 && (TYPE_UNSIGNED (TREE_TYPE
7776 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 == TYPE_UNSIGNED (TREE_TYPE
7778 (TREE_OPERAND
7779 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7780 {
888d65b5
RS
7781 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7782 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7783 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7784 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7785 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7786
b10af0c8 7787 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7788 {
b10af0c8
TG
7789 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7790 {
b10af0c8 7791 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7792 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7793 TREE_OPERAND (exp, 1),
7794 NULL_RTX, &op0, &op1, 0);
b10af0c8 7795 else
eb698c58
RS
7796 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7797 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7798 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7799 goto binop2;
7800 }
7801 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7802 && innermode == word_mode)
7803 {
888d65b5 7804 rtx htem, hipart;
b10af0c8
TG
7805 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7808 op1 = convert_modes (innermode, mode,
7809 expand_expr (TREE_OPERAND (exp, 1),
7810 NULL_RTX, VOIDmode, 0),
7811 unsignedp);
b10af0c8
TG
7812 else
7813 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7814 NULL_RTX, VOIDmode, 0);
7815 temp = expand_binop (mode, other_optab, op0, op1, target,
7816 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7817 hipart = gen_highpart (innermode, temp);
7818 htem = expand_mult_highpart_adjust (innermode, hipart,
7819 op0, op1, hipart,
7820 zextend_p);
7821 if (htem != hipart)
7822 emit_move_insn (hipart, htem);
bc15d0ef 7823 return REDUCE_BIT_FIELD (temp);
b10af0c8 7824 }
bbf6f052
RK
7825 }
7826 }
eb698c58
RS
7827 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7828 subtarget, &op0, &op1, 0);
bc15d0ef 7829 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
bbf6f052
RK
7830
7831 case TRUNC_DIV_EXPR:
7832 case FLOOR_DIV_EXPR:
7833 case CEIL_DIV_EXPR:
7834 case ROUND_DIV_EXPR:
7835 case EXACT_DIV_EXPR:
8403445a
AM
7836 if (modifier == EXPAND_STACK_PARM)
7837 target = 0;
bbf6f052
RK
7838 /* Possible optimization: compute the dividend with EXPAND_SUM
7839 then if the divisor is constant can optimize the case
7840 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7841 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7842 subtarget, &op0, &op1, 0);
bbf6f052
RK
7843 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7844
7845 case RDIV_EXPR:
b7e9703c
JH
7846 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7847 expensive divide. If not, combine will rebuild the original
7848 computation. */
7849 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7850 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7851 && !real_onep (TREE_OPERAND (exp, 0)))
7852 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7853 build (RDIV_EXPR, type,
7854 build_real (type, dconst1),
7855 TREE_OPERAND (exp, 1))),
8e37cba8 7856 target, tmode, modifier);
ef89d648 7857 this_optab = sdiv_optab;
bbf6f052
RK
7858 goto binop;
7859
7860 case TRUNC_MOD_EXPR:
7861 case FLOOR_MOD_EXPR:
7862 case CEIL_MOD_EXPR:
7863 case ROUND_MOD_EXPR:
8403445a
AM
7864 if (modifier == EXPAND_STACK_PARM)
7865 target = 0;
eb698c58
RS
7866 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7867 subtarget, &op0, &op1, 0);
bbf6f052
RK
7868 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7869
7870 case FIX_ROUND_EXPR:
7871 case FIX_FLOOR_EXPR:
7872 case FIX_CEIL_EXPR:
7873 abort (); /* Not used for C. */
7874
7875 case FIX_TRUNC_EXPR:
906c4e36 7876 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7877 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7878 target = gen_reg_rtx (mode);
7879 expand_fix (target, op0, unsignedp);
7880 return target;
7881
7882 case FLOAT_EXPR:
906c4e36 7883 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7884 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7885 target = gen_reg_rtx (mode);
7886 /* expand_float can't figure out what to do if FROM has VOIDmode.
7887 So give it the correct mode. With -O, cse will optimize this. */
7888 if (GET_MODE (op0) == VOIDmode)
7889 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7890 op0);
7891 expand_float (target, op0,
8df83eae 7892 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7893 return target;
7894
7895 case NEGATE_EXPR:
5b22bee8 7896 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7897 if (modifier == EXPAND_STACK_PARM)
7898 target = 0;
91ce572a 7899 temp = expand_unop (mode,
0fb7aeda
KH
7900 ! unsignedp && flag_trapv
7901 && (GET_MODE_CLASS(mode) == MODE_INT)
7902 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7903 if (temp == 0)
7904 abort ();
bc15d0ef 7905 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
7906
7907 case ABS_EXPR:
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7909 if (modifier == EXPAND_STACK_PARM)
7910 target = 0;
bbf6f052 7911
11017cc7 7912 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
7913 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7914 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 7915 abort ();
2d7050fd 7916
bbf6f052
RK
7917 /* Unsigned abs is simply the operand. Testing here means we don't
7918 risk generating incorrect code below. */
8df83eae 7919 if (TYPE_UNSIGNED (type))
bbf6f052
RK
7920 return op0;
7921
91ce572a 7922 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7923 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7924
7925 case MAX_EXPR:
7926 case MIN_EXPR:
7927 target = original_target;
8403445a
AM
7928 if (target == 0
7929 || modifier == EXPAND_STACK_PARM
3c0cb5de 7930 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 7931 || GET_MODE (target) != mode
f8cfc6aa 7932 || (REG_P (target)
bbf6f052
RK
7933 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7934 target = gen_reg_rtx (mode);
eb698c58
RS
7935 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7936 target, &op0, &op1, 0);
bbf6f052
RK
7937
7938 /* First try to do it with a special MIN or MAX instruction.
7939 If that does not win, use a conditional jump to select the proper
7940 value. */
288dc1ea 7941 this_optab = (unsignedp
bbf6f052
RK
7942 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7943 : (code == MIN_EXPR ? smin_optab : smax_optab));
7944
7945 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7946 OPTAB_WIDEN);
7947 if (temp != 0)
7948 return temp;
7949
fa2981d8
JW
7950 /* At this point, a MEM target is no longer useful; we will get better
7951 code without it. */
3a94c984 7952
3c0cb5de 7953 if (MEM_P (target))
fa2981d8
JW
7954 target = gen_reg_rtx (mode);
7955
e3be1116
RS
7956 /* If op1 was placed in target, swap op0 and op1. */
7957 if (target != op0 && target == op1)
7958 {
7959 rtx tem = op0;
7960 op0 = op1;
7961 op1 = tem;
7962 }
7963
ee456b1c
RK
7964 if (target != op0)
7965 emit_move_insn (target, op0);
d6a5ac33 7966
bbf6f052 7967 op0 = gen_label_rtx ();
d6a5ac33 7968
f81497d9
RS
7969 /* If this mode is an integer too wide to compare properly,
7970 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
7971 if (GET_MODE_CLASS (mode) == MODE_INT
7972 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 7973 {
f81497d9 7974 if (code == MAX_EXPR)
288dc1ea
EB
7975 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7976 NULL_RTX, op0);
bbf6f052 7977 else
288dc1ea
EB
7978 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7979 NULL_RTX, op0);
bbf6f052 7980 }
f81497d9
RS
7981 else
7982 {
b30f05db 7983 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
288dc1ea 7984 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
f81497d9 7985 }
b30f05db 7986 emit_move_insn (target, op1);
bbf6f052
RK
7987 emit_label (op0);
7988 return target;
7989
bbf6f052
RK
7990 case BIT_NOT_EXPR:
7991 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7992 if (modifier == EXPAND_STACK_PARM)
7993 target = 0;
bbf6f052
RK
7994 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7995 if (temp == 0)
7996 abort ();
7997 return temp;
7998
d6a5ac33
RK
7999 /* ??? Can optimize bitwise operations with one arg constant.
8000 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8001 and (a bitwise1 b) bitwise2 b (etc)
8002 but that is probably not worth while. */
8003
8004 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8005 boolean values when we want in all cases to compute both of them. In
8006 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8007 as actual zero-or-1 values and then bitwise anding. In cases where
8008 there cannot be any side effects, better code would be made by
8009 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8010 how to recognize those cases. */
8011
bbf6f052
RK
8012 case TRUTH_AND_EXPR:
8013 case BIT_AND_EXPR:
8014 this_optab = and_optab;
8015 goto binop;
8016
bbf6f052
RK
8017 case TRUTH_OR_EXPR:
8018 case BIT_IOR_EXPR:
8019 this_optab = ior_optab;
8020 goto binop;
8021
874726a8 8022 case TRUTH_XOR_EXPR:
bbf6f052
RK
8023 case BIT_XOR_EXPR:
8024 this_optab = xor_optab;
8025 goto binop;
8026
8027 case LSHIFT_EXPR:
8028 case RSHIFT_EXPR:
8029 case LROTATE_EXPR:
8030 case RROTATE_EXPR:
e5e809f4 8031 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8032 subtarget = 0;
8403445a
AM
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
bbf6f052
RK
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8036 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8037 unsignedp);
8038
d6a5ac33
RK
8039 /* Could determine the answer when only additive constants differ. Also,
8040 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8041 case LT_EXPR:
8042 case LE_EXPR:
8043 case GT_EXPR:
8044 case GE_EXPR:
8045 case EQ_EXPR:
8046 case NE_EXPR:
1eb8759b
RH
8047 case UNORDERED_EXPR:
8048 case ORDERED_EXPR:
8049 case UNLT_EXPR:
8050 case UNLE_EXPR:
8051 case UNGT_EXPR:
8052 case UNGE_EXPR:
8053 case UNEQ_EXPR:
d1a7edaf 8054 case LTGT_EXPR:
8403445a
AM
8055 temp = do_store_flag (exp,
8056 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8057 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8058 if (temp != 0)
8059 return temp;
d6a5ac33 8060
0f41302f 8061 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8062 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8063 && original_target
f8cfc6aa 8064 && REG_P (original_target)
bbf6f052
RK
8065 && (GET_MODE (original_target)
8066 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8067 {
d6a5ac33
RK
8068 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8069 VOIDmode, 0);
8070
c0a3eeac
UW
8071 /* If temp is constant, we can just compute the result. */
8072 if (GET_CODE (temp) == CONST_INT)
8073 {
8074 if (INTVAL (temp) != 0)
8075 emit_move_insn (target, const1_rtx);
8076 else
8077 emit_move_insn (target, const0_rtx);
8078
8079 return target;
8080 }
8081
bbf6f052 8082 if (temp != original_target)
c0a3eeac
UW
8083 {
8084 enum machine_mode mode1 = GET_MODE (temp);
8085 if (mode1 == VOIDmode)
8086 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8087
c0a3eeac
UW
8088 temp = copy_to_mode_reg (mode1, temp);
8089 }
d6a5ac33 8090
bbf6f052 8091 op1 = gen_label_rtx ();
c5d5d461 8092 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8093 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8094 emit_move_insn (temp, const1_rtx);
8095 emit_label (op1);
8096 return temp;
8097 }
d6a5ac33 8098
bbf6f052
RK
8099 /* If no set-flag instruction, must generate a conditional
8100 store into a temporary variable. Drop through
8101 and handle this like && and ||. */
8102
8103 case TRUTH_ANDIF_EXPR:
8104 case TRUTH_ORIF_EXPR:
e44842fe 8105 if (! ignore
8403445a
AM
8106 && (target == 0
8107 || modifier == EXPAND_STACK_PARM
8108 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8109 /* Make sure we don't have a hard reg (such as function's return
8110 value) live across basic blocks, if not optimizing. */
f8cfc6aa 8111 || (!optimize && REG_P (target)
e44842fe 8112 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8113 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8114
8115 if (target)
8116 emit_clr_insn (target);
8117
bbf6f052
RK
8118 op1 = gen_label_rtx ();
8119 jumpifnot (exp, op1);
e44842fe
RK
8120
8121 if (target)
8122 emit_0_to_1_insn (target);
8123
bbf6f052 8124 emit_label (op1);
e44842fe 8125 return ignore ? const0_rtx : target;
bbf6f052
RK
8126
8127 case TRUTH_NOT_EXPR:
8403445a
AM
8128 if (modifier == EXPAND_STACK_PARM)
8129 target = 0;
bbf6f052
RK
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8131 /* The parser is careful to generate TRUTH_NOT_EXPR
8132 only with operands that are always zero or one. */
906c4e36 8133 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8134 target, 1, OPTAB_LIB_WIDEN);
8135 if (temp == 0)
8136 abort ();
8137 return temp;
8138
8139 case COMPOUND_EXPR:
8140 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8141 emit_queue ();
0fab64a3
MM
8142 return expand_expr_real (TREE_OPERAND (exp, 1),
8143 (ignore ? const0_rtx : target),
8144 VOIDmode, modifier, alt_rtl);
bbf6f052 8145
6de9cd9a
DN
8146 case STATEMENT_LIST:
8147 {
8148 tree_stmt_iterator iter;
8149
8150 if (!ignore)
8151 abort ();
8152
8153 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8154 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8155 }
8156 return const0_rtx;
8157
bbf6f052 8158 case COND_EXPR:
6de9cd9a
DN
8159 /* If it's void, we don't need to worry about computing a value. */
8160 if (VOID_TYPE_P (TREE_TYPE (exp)))
8161 {
8162 tree pred = TREE_OPERAND (exp, 0);
8163 tree then_ = TREE_OPERAND (exp, 1);
8164 tree else_ = TREE_OPERAND (exp, 2);
8165
ac45df5d
RH
8166 if (TREE_CODE (then_) == GOTO_EXPR
8167 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
6de9cd9a
DN
8168 {
8169 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8170 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8171 }
8172 else if (TREE_CODE (else_) == GOTO_EXPR
8173 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8174 {
8175 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8176 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8177 }
8178
8179 /* Just use the 'if' machinery. */
8180 expand_start_cond (pred, 0);
6de9cd9a
DN
8181 expand_expr (then_, const0_rtx, VOIDmode, 0);
8182
8183 exp = else_;
8184
8185 /* Iterate over 'else if's instead of recursing. */
8186 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8187 {
8188 expand_start_else ();
8189 if (EXPR_HAS_LOCATION (exp))
8190 {
8191 emit_line_note (EXPR_LOCATION (exp));
1ea463a2 8192 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
8193 }
8194 expand_elseif (TREE_OPERAND (exp, 0));
8195 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8196 }
8197 /* Don't emit the jump and label if there's no 'else' clause. */
8198 if (TREE_SIDE_EFFECTS (exp))
8199 {
8200 expand_start_else ();
8201 expand_expr (exp, const0_rtx, VOIDmode, 0);
8202 }
6de9cd9a
DN
8203 expand_end_cond ();
8204 return const0_rtx;
8205 }
8206
ac01eace
RK
8207 /* If we would have a "singleton" (see below) were it not for a
8208 conversion in each arm, bring that conversion back out. */
8209 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8210 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8211 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8212 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8213 {
d6edb99e
ZW
8214 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8215 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8216
8217 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8218 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8219 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8220 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8221 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8222 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8223 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8224 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8225 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8226 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8227 TREE_OPERAND (exp, 0),
d6edb99e 8228 iftrue, iffalse)),
ac01eace
RK
8229 target, tmode, modifier);
8230 }
8231
bbf6f052
RK
8232 {
8233 /* Note that COND_EXPRs whose type is a structure or union
8234 are required to be constructed to contain assignments of
8235 a temporary variable, so that we can evaluate them here
8236 for side effect only. If type is void, we must do likewise. */
8237
8238 /* If an arm of the branch requires a cleanup,
8239 only that cleanup is performed. */
8240
8241 tree singleton = 0;
8242 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8243
8244 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8245 convert it to our mode, if necessary. */
8246 if (integer_onep (TREE_OPERAND (exp, 1))
8247 && integer_zerop (TREE_OPERAND (exp, 2))
8248 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8249 {
dd27116b
RK
8250 if (ignore)
8251 {
8252 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8253 modifier);
dd27116b
RK
8254 return const0_rtx;
8255 }
8256
8403445a
AM
8257 if (modifier == EXPAND_STACK_PARM)
8258 target = 0;
37a08a29 8259 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8260 if (GET_MODE (op0) == mode)
8261 return op0;
d6a5ac33 8262
bbf6f052
RK
8263 if (target == 0)
8264 target = gen_reg_rtx (mode);
8265 convert_move (target, op0, unsignedp);
8266 return target;
8267 }
8268
ac01eace
RK
8269 /* Check for X ? A + B : A. If we have this, we can copy A to the
8270 output and conditionally add B. Similarly for unary operations.
8271 Don't do this if X has side-effects because those side effects
8272 might affect A or B and the "?" operation is a sequence point in
8273 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8274
8275 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8276 && operand_equal_p (TREE_OPERAND (exp, 2),
8277 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8278 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8279 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8280 && operand_equal_p (TREE_OPERAND (exp, 1),
8281 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8282 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8283 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8284 && operand_equal_p (TREE_OPERAND (exp, 2),
8285 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8286 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8287 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8288 && operand_equal_p (TREE_OPERAND (exp, 1),
8289 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8290 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8291
01c8a7c8
RK
8292 /* If we are not to produce a result, we have no target. Otherwise,
8293 if a target was specified use it; it will not be used as an
3a94c984 8294 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8295 temporary. */
8296
8297 if (ignore)
8298 temp = 0;
8403445a
AM
8299 else if (modifier == EXPAND_STACK_PARM)
8300 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8301 else if (original_target
e5e809f4 8302 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
f8cfc6aa 8303 || (singleton && REG_P (original_target)
01c8a7c8
RK
8304 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8305 && original_target == var_rtx (singleton)))
8306 && GET_MODE (original_target) == mode
7c00d1fe
RK
8307#ifdef HAVE_conditional_move
8308 && (! can_conditionally_move_p (mode)
f8cfc6aa 8309 || REG_P (original_target)
7c00d1fe
RK
8310 || TREE_ADDRESSABLE (type))
8311#endif
3c0cb5de 8312 && (!MEM_P (original_target)
8125d7e9 8313 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8314 temp = original_target;
8315 else if (TREE_ADDRESSABLE (type))
8316 abort ();
8317 else
8318 temp = assign_temp (type, 0, 0, 1);
8319
ac01eace
RK
8320 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8321 do the test of X as a store-flag operation, do this as
8322 A + ((X != 0) << log C). Similarly for other simple binary
8323 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 8324 if (temp && singleton && binary_op
bbf6f052
RK
8325 && (TREE_CODE (binary_op) == PLUS_EXPR
8326 || TREE_CODE (binary_op) == MINUS_EXPR
8327 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8328 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8329 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8330 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8331 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8332 {
8333 rtx result;
61f6c84f 8334 tree cond;
91ce572a 8335 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8336 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8337 ? addv_optab : add_optab)
8338 : TREE_CODE (binary_op) == MINUS_EXPR
8339 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8340 ? subv_optab : sub_optab)
8341 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8342 : xor_optab);
bbf6f052 8343
61f6c84f 8344 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8345 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8346 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8347 else
8348 cond = TREE_OPERAND (exp, 0);
bbf6f052 8349
61f6c84f
JJ
8350 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8351 ? temp : NULL_RTX),
bbf6f052
RK
8352 mode, BRANCH_COST <= 1);
8353
ac01eace
RK
8354 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8355 result = expand_shift (LSHIFT_EXPR, mode, result,
8356 build_int_2 (tree_log2
8357 (TREE_OPERAND
8358 (binary_op, 1)),
8359 0),
e5e809f4 8360 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8361 ? temp : NULL_RTX), 0);
8362
bbf6f052
RK
8363 if (result)
8364 {
906c4e36 8365 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8366 return expand_binop (mode, boptab, op1, result, temp,
8367 unsignedp, OPTAB_LIB_WIDEN);
8368 }
bbf6f052 8369 }
3a94c984 8370
dabf8373 8371 do_pending_stack_adjust ();
bbf6f052
RK
8372 NO_DEFER_POP;
8373 op0 = gen_label_rtx ();
8374
8375 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8376 {
8377 if (temp != 0)
8378 {
8379 /* If the target conflicts with the other operand of the
8380 binary op, we can't use it. Also, we can't use the target
8381 if it is a hard register, because evaluating the condition
8382 might clobber it. */
8383 if ((binary_op
e5e809f4 8384 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
f8cfc6aa 8385 || (REG_P (temp)
bbf6f052
RK
8386 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8387 temp = gen_reg_rtx (mode);
8403445a
AM
8388 store_expr (singleton, temp,
8389 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8390 }
8391 else
906c4e36 8392 expand_expr (singleton,
2937cf87 8393 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8394 if (singleton == TREE_OPERAND (exp, 1))
8395 jumpif (TREE_OPERAND (exp, 0), op0);
8396 else
8397 jumpifnot (TREE_OPERAND (exp, 0), op0);
8398
8399 if (binary_op && temp == 0)
8400 /* Just touch the other operand. */
8401 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8403 else if (binary_op)
8404 store_expr (build (TREE_CODE (binary_op), type,
8405 make_tree (type, temp),
8406 TREE_OPERAND (binary_op, 1)),
8403445a 8407 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8408 else
8409 store_expr (build1 (TREE_CODE (unary_op), type,
8410 make_tree (type, temp)),
8403445a 8411 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8412 op1 = op0;
bbf6f052 8413 }
bbf6f052
RK
8414 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8415 comparison operator. If we have one of these cases, set the
8416 output to A, branch on A (cse will merge these two references),
8417 then set the output to FOO. */
8418 else if (temp
8419 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8420 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8421 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8422 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8423 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8424 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8425 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8426 {
f8cfc6aa 8427 if (REG_P (temp)
3a94c984 8428 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8429 temp = gen_reg_rtx (mode);
8403445a
AM
8430 store_expr (TREE_OPERAND (exp, 1), temp,
8431 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8432 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8433
c37b68d4
RS
8434 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8435 store_expr (TREE_OPERAND (exp, 2), temp,
8436 modifier == EXPAND_STACK_PARM ? 2 : 0);
8437 else
8438 expand_expr (TREE_OPERAND (exp, 2),
8439 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8440 op1 = op0;
8441 }
8442 else if (temp
8443 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8444 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8446 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8447 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8448 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8449 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8450 {
f8cfc6aa 8451 if (REG_P (temp)
3a94c984 8452 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8453 temp = gen_reg_rtx (mode);
8403445a
AM
8454 store_expr (TREE_OPERAND (exp, 2), temp,
8455 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8456 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8457
c37b68d4
RS
8458 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8459 store_expr (TREE_OPERAND (exp, 1), temp,
8460 modifier == EXPAND_STACK_PARM ? 2 : 0);
8461 else
8462 expand_expr (TREE_OPERAND (exp, 1),
8463 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8464 op1 = op0;
8465 }
8466 else
8467 {
8468 op1 = gen_label_rtx ();
8469 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8470
2ac84cfe 8471 /* One branch of the cond can be void, if it never returns. For
3a94c984 8472 example A ? throw : E */
2ac84cfe 8473 if (temp != 0
3a94c984 8474 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8475 store_expr (TREE_OPERAND (exp, 1), temp,
8476 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8477 else
906c4e36
RK
8478 expand_expr (TREE_OPERAND (exp, 1),
8479 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8480 emit_queue ();
8481 emit_jump_insn (gen_jump (op1));
8482 emit_barrier ();
8483 emit_label (op0);
2ac84cfe 8484 if (temp != 0
3a94c984 8485 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8486 store_expr (TREE_OPERAND (exp, 2), temp,
8487 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8488 else
906c4e36
RK
8489 expand_expr (TREE_OPERAND (exp, 2),
8490 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8491 }
8492
bbf6f052
RK
8493 emit_queue ();
8494 emit_label (op1);
8495 OK_DEFER_POP;
5dab5552 8496
bbf6f052
RK
8497 return temp;
8498 }
8499
bbf6f052
RK
8500 case INIT_EXPR:
8501 {
8502 tree lhs = TREE_OPERAND (exp, 0);
8503 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8504
b90f141a 8505 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8506 return temp;
8507 }
8508
8509 case MODIFY_EXPR:
8510 {
8511 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8512 That's so we don't compute a pointer and save it over a
8513 call. If lhs is simple, compute it first so we can give it
8514 as a target if the rhs is just a call. This avoids an
8515 extra temp and copy and that prevents a partial-subsumption
8516 which makes bad code. Actually we could treat
8517 component_ref's of vars like vars. */
bbf6f052
RK
8518
8519 tree lhs = TREE_OPERAND (exp, 0);
8520 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8521
8522 temp = 0;
8523
bbf6f052
RK
8524 /* Check for |= or &= of a bitfield of size one into another bitfield
8525 of size 1. In this case, (unless we need the result of the
8526 assignment) we can do this more efficiently with a
8527 test followed by an assignment, if necessary.
8528
8529 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8530 things change so we do, this code should be enhanced to
8531 support it. */
8532 if (ignore
8533 && TREE_CODE (lhs) == COMPONENT_REF
8534 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8535 || TREE_CODE (rhs) == BIT_AND_EXPR)
8536 && TREE_OPERAND (rhs, 0) == lhs
8537 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8538 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8539 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8540 {
8541 rtx label = gen_label_rtx ();
8542
8543 do_jump (TREE_OPERAND (rhs, 1),
8544 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8545 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8546 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8547 (TREE_CODE (rhs) == BIT_IOR_EXPR
8548 ? integer_one_node
8549 : integer_zero_node)),
b90f141a 8550 0);
e7c33f54 8551 do_pending_stack_adjust ();
bbf6f052
RK
8552 emit_label (label);
8553 return const0_rtx;
8554 }
8555
b90f141a 8556 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8557
bbf6f052
RK
8558 return temp;
8559 }
8560
6e7f84a7
APB
8561 case RETURN_EXPR:
8562 if (!TREE_OPERAND (exp, 0))
8563 expand_null_return ();
8564 else
8565 expand_return (TREE_OPERAND (exp, 0));
8566 return const0_rtx;
8567
bbf6f052
RK
8568 case PREINCREMENT_EXPR:
8569 case PREDECREMENT_EXPR:
bc15d0ef 8570 return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
bbf6f052
RK
8571
8572 case POSTINCREMENT_EXPR:
8573 case POSTDECREMENT_EXPR:
8574 /* Faster to treat as pre-increment if result is not used. */
bc15d0ef 8575 return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
bbf6f052
RK
8576
8577 case ADDR_EXPR:
8403445a
AM
8578 if (modifier == EXPAND_STACK_PARM)
8579 target = 0;
682ba3a6
RK
8580 /* If we are taking the address of something erroneous, just
8581 return a zero. */
6de9cd9a 8582 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
682ba3a6 8583 return const0_rtx;
d6b6783b
RK
8584 /* If we are taking the address of a constant and are at the
8585 top level, we have to use output_constant_def since we can't
8586 call force_const_mem at top level. */
8587 else if (cfun == 0
8588 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8589 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8590 == 'c')))
8591 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8592 else
8593 {
e287fd6e
RK
8594 /* We make sure to pass const0_rtx down if we came in with
8595 ignore set, to avoid doing the cleanups twice for something. */
8596 op0 = expand_expr (TREE_OPERAND (exp, 0),
8597 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8598 (modifier == EXPAND_INITIALIZER
8599 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8600
119af78a
RK
8601 /* If we are going to ignore the result, OP0 will have been set
8602 to const0_rtx, so just return it. Don't get confused and
8603 think we are taking the address of the constant. */
8604 if (ignore)
8605 return op0;
8606
73b7f58c
BS
8607 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8608 clever and returns a REG when given a MEM. */
8609 op0 = protect_from_queue (op0, 1);
3539e816 8610
c5c76735
JL
8611 /* We would like the object in memory. If it is a constant, we can
8612 have it be statically allocated into memory. For a non-constant,
8613 we need to allocate some memory and store the value into it. */
896102d0
RK
8614
8615 if (CONSTANT_P (op0))
8616 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8617 op0);
f8cfc6aa 8618 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8fff4fc1
RH
8619 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8620 || GET_CODE (op0) == LO_SUM)
896102d0 8621 {
82c82743
RH
8622 /* If this object is in a register, it can't be BLKmode. */
8623 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8624 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8625
8626 if (GET_CODE (op0) == PARALLEL)
8627 /* Handle calls that pass values in multiple
8628 non-contiguous locations. The Irix 6 ABI has examples
8629 of this. */
8630 emit_group_store (memloc, op0, inner_type,
8631 int_size_in_bytes (inner_type));
df6018fd 8632 else
82c82743 8633 emit_move_insn (memloc, op0);
0fb7aeda 8634
82c82743 8635 op0 = memloc;
896102d0
RK
8636 }
8637
3c0cb5de 8638 if (!MEM_P (op0))
bbf6f052 8639 abort ();
3a94c984 8640
34e81b5a 8641 mark_temp_addr_taken (op0);
bbf6f052 8642 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8643 {
34e81b5a 8644 op0 = XEXP (op0, 0);
5ae6cd0d 8645 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8646 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8647 return op0;
88f63c77 8648 }
987c71d9 8649
c952ff4b
RK
8650 /* If OP0 is not aligned as least as much as the type requires, we
8651 need to make a temporary, copy OP0 to it, and take the address of
8652 the temporary. We want to use the alignment of the type, not of
8653 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8654 the test for BLKmode means that can't happen. The test for
8655 BLKmode is because we never make mis-aligned MEMs with
8656 non-BLKmode.
8657
8658 We don't need to do this at all if the machine doesn't have
8659 strict alignment. */
8660 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8661 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8662 > MEM_ALIGN (op0))
8663 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8664 {
8665 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8666 rtx new;
a06ef755 8667
c3d32120
RK
8668 if (TYPE_ALIGN_OK (inner_type))
8669 abort ();
8670
bdaa131b
JM
8671 if (TREE_ADDRESSABLE (inner_type))
8672 {
8673 /* We can't make a bitwise copy of this object, so fail. */
8674 error ("cannot take the address of an unaligned member");
8675 return const0_rtx;
8676 }
8677
8678 new = assign_stack_temp_for_type
8679 (TYPE_MODE (inner_type),
8680 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8681 : int_size_in_bytes (inner_type),
8682 1, build_qualified_type (inner_type,
8683 (TYPE_QUALS (inner_type)
8684 | TYPE_QUAL_CONST)));
8685
44bb111a 8686 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8687 (modifier == EXPAND_STACK_PARM
8688 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8689
a06ef755
RK
8690 op0 = new;
8691 }
8692
bbf6f052
RK
8693 op0 = force_operand (XEXP (op0, 0), target);
8694 }
987c71d9 8695
05c8e58b 8696 if (flag_force_addr
f8cfc6aa 8697 && !REG_P (op0)
05c8e58b
HPN
8698 && modifier != EXPAND_CONST_ADDRESS
8699 && modifier != EXPAND_INITIALIZER
8700 && modifier != EXPAND_SUM)
987c71d9
RK
8701 op0 = force_reg (Pmode, op0);
8702
f8cfc6aa 8703 if (REG_P (op0)
dc6d66b3 8704 && ! REG_USERVAR_P (op0))
bdb429a5 8705 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8706
5ae6cd0d 8707 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8708 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8709
bbf6f052
RK
8710 return op0;
8711
8712 case ENTRY_VALUE_EXPR:
8713 abort ();
8714
7308a047
RS
8715 /* COMPLEX type for Extended Pascal & Fortran */
8716 case COMPLEX_EXPR:
8717 {
8718 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8719 rtx insns;
7308a047
RS
8720
8721 /* Get the rtx code of the operands. */
8722 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8723 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8724
8725 if (! target)
8726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8727
6551fa4d 8728 start_sequence ();
7308a047
RS
8729
8730 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8731 emit_move_insn (gen_realpart (mode, target), op0);
8732 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8733
6551fa4d
JW
8734 insns = get_insns ();
8735 end_sequence ();
8736
7308a047 8737 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8738 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8739 each with a separate pseudo as destination.
8740 It's not correct for flow to treat them as a unit. */
6d6e61ce 8741 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8742 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8743 else
2f937369 8744 emit_insn (insns);
7308a047
RS
8745
8746 return target;
8747 }
8748
8749 case REALPART_EXPR:
2d7050fd
RS
8750 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8751 return gen_realpart (mode, op0);
3a94c984 8752
7308a047 8753 case IMAGPART_EXPR:
2d7050fd
RS
8754 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8755 return gen_imagpart (mode, op0);
7308a047
RS
8756
8757 case CONJ_EXPR:
8758 {
62acb978 8759 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8760 rtx imag_t;
6551fa4d 8761 rtx insns;
3a94c984
KH
8762
8763 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8764
8765 if (! target)
d6a5ac33 8766 target = gen_reg_rtx (mode);
3a94c984 8767
6551fa4d 8768 start_sequence ();
7308a047
RS
8769
8770 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8771 emit_move_insn (gen_realpart (partmode, target),
8772 gen_realpart (partmode, op0));
7308a047 8773
62acb978 8774 imag_t = gen_imagpart (partmode, target);
91ce572a 8775 temp = expand_unop (partmode,
0fb7aeda
KH
8776 ! unsignedp && flag_trapv
8777 && (GET_MODE_CLASS(partmode) == MODE_INT)
8778 ? negv_optab : neg_optab,
3a94c984 8779 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8780 if (temp != imag_t)
8781 emit_move_insn (imag_t, temp);
8782
6551fa4d
JW
8783 insns = get_insns ();
8784 end_sequence ();
8785
3a94c984 8786 /* Conjugate should appear as a single unit
d6a5ac33 8787 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8788 each with a separate pseudo as destination.
8789 It's not correct for flow to treat them as a unit. */
6d6e61ce 8790 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8791 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8792 else
2f937369 8793 emit_insn (insns);
7308a047
RS
8794
8795 return target;
8796 }
8797
6de9cd9a
DN
8798 case RESX_EXPR:
8799 expand_resx_expr (exp);
8800 return const0_rtx;
8801
e976b8b2 8802 case TRY_CATCH_EXPR:
6de9cd9a 8803 case CATCH_EXPR:
6de9cd9a 8804 case EH_FILTER_EXPR:
b335b813 8805 case TRY_FINALLY_EXPR:
ac45df5d
RH
8806 /* Lowered by tree-eh.c. */
8807 abort ();
b335b813 8808
ac45df5d
RH
8809 case WITH_CLEANUP_EXPR:
8810 case CLEANUP_POINT_EXPR:
8811 case TARGET_EXPR:
8812 /* Lowered by gimplify.c. */
8813 abort ();
b335b813 8814
d3707adb
RH
8815 case VA_ARG_EXPR:
8816 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8817
52a11cbf 8818 case EXC_PTR_EXPR:
86c99549 8819 return get_exception_pointer (cfun);
52a11cbf 8820
6de9cd9a
DN
8821 case FILTER_EXPR:
8822 return get_exception_filter (cfun);
8823
67231816
RH
8824 case FDESC_EXPR:
8825 /* Function descriptors are not valid except for as
8826 initialization constants, and should not be expanded. */
8827 abort ();
8828
6de9cd9a
DN
8829 case SWITCH_EXPR:
8830 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
8831 "switch");
8832 if (SWITCH_BODY (exp))
8833 expand_expr_stmt (SWITCH_BODY (exp));
8834 if (SWITCH_LABELS (exp))
8835 {
8836 tree duplicate = 0;
8837 tree vec = SWITCH_LABELS (exp);
8838 size_t i, n = TREE_VEC_LENGTH (vec);
8839
8840 for (i = 0; i < n; ++i)
8841 {
8842 tree elt = TREE_VEC_ELT (vec, i);
8843 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
8844 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
8845 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
8846
8847 tree case_low = CASE_LOW (elt);
8848 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
8849 if (case_low && case_high)
8850 {
8851 /* Case label is less than minimum for type. */
ebd5a208
RK
8852 if (TREE_CODE (min_value) == INTEGER_CST
8853 && tree_int_cst_compare (case_low, min_value) < 0
8854 && tree_int_cst_compare (case_high, min_value) < 0)
6de9cd9a
DN
8855 {
8856 warning ("case label value %d is less than minimum value for type",
8857 TREE_INT_CST (case_low));
8858 continue;
8859 }
8860
8861 /* Case value is greater than maximum for type. */
ebd5a208
RK
8862 if (TREE_CODE (max_value) == INTEGER_CST
8863 && tree_int_cst_compare (case_low, max_value) > 0
8864 && tree_int_cst_compare (case_high, max_value) > 0)
6de9cd9a
DN
8865 {
8866 warning ("case label value %d exceeds maximum value for type",
8867 TREE_INT_CST (case_high));
8868 continue;
8869 }
8870
8871 /* Saturate lower case label value to minimum. */
ebd5a208
RK
8872 if (TREE_CODE (min_value) == INTEGER_CST
8873 && tree_int_cst_compare (case_high, min_value) >= 0
8874 && tree_int_cst_compare (case_low, min_value) < 0)
6de9cd9a
DN
8875 {
8876 warning ("lower value %d in case label range less than minimum value for type",
8877 TREE_INT_CST (case_low));
8878 case_low = min_value;
8879 }
8880
8881 /* Saturate upper case label value to maximum. */
ebd5a208
RK
8882 if (TREE_CODE (max_value) == INTEGER_CST
8883 && tree_int_cst_compare (case_low, max_value) <= 0
8884 && tree_int_cst_compare (case_high, max_value) > 0)
6de9cd9a
DN
8885 {
8886 warning ("upper value %d in case label range exceeds maximum value for type",
8887 TREE_INT_CST (case_high));
8888 case_high = max_value;
8889 }
8890 }
8891
8892 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
8893 if (duplicate)
8894 abort ();
8895 }
8896 }
8897 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8898 return const0_rtx;
8899
8900 case LABEL_EXPR:
8901 expand_label (TREE_OPERAND (exp, 0));
8902 return const0_rtx;
8903
8904 case CASE_LABEL_EXPR:
8905 {
8906 tree duplicate = 0;
8907 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
8908 &duplicate, false);
8909 if (duplicate)
8910 abort ();
8911 return const0_rtx;
8912 }
8913
8914 case ASM_EXPR:
8915 expand_asm_expr (exp);
8916 return const0_rtx;
8917
bbf6f052 8918 default:
673fda6b
SB
8919 return lang_hooks.expand_expr (exp, original_target, tmode,
8920 modifier, alt_rtl);
bbf6f052
RK
8921 }
8922
8923 /* Here to do an ordinary binary operator, generating an instruction
8924 from the optab already placed in `this_optab'. */
8925 binop:
eb698c58
RS
8926 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8927 subtarget, &op0, &op1, 0);
bbf6f052 8928 binop2:
8403445a
AM
8929 if (modifier == EXPAND_STACK_PARM)
8930 target = 0;
bbf6f052
RK
8931 temp = expand_binop (mode, this_optab, op0, op1, target,
8932 unsignedp, OPTAB_LIB_WIDEN);
8933 if (temp == 0)
8934 abort ();
bc15d0ef
JM
8935 return REDUCE_BIT_FIELD (temp);
8936}
8937#undef REDUCE_BIT_FIELD
8938\f
8939/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8940 signedness of TYPE), possibly returning the result in TARGET. */
8941static rtx
8942reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8943{
8944 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8945 if (target && GET_MODE (target) != GET_MODE (exp))
8946 target = 0;
8947 if (TYPE_UNSIGNED (type))
8948 {
8949 rtx mask;
8950 if (prec < HOST_BITS_PER_WIDE_INT)
8951 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8952 GET_MODE (exp));
8953 else
8954 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8955 ((unsigned HOST_WIDE_INT) 1
8956 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8957 GET_MODE (exp));
8958 return expand_and (GET_MODE (exp), exp, mask, target);
8959 }
8960 else
8961 {
8962 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8963 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8964 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8965 }
bbf6f052 8966}
b93a436e 8967\f
1ce7f3c2
RK
8968/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8969 when applied to the address of EXP produces an address known to be
8970 aligned more than BIGGEST_ALIGNMENT. */
8971
8972static int
502b8322 8973is_aligning_offset (tree offset, tree exp)
1ce7f3c2 8974{
6fce44af 8975 /* Strip off any conversions. */
1ce7f3c2
RK
8976 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8977 || TREE_CODE (offset) == NOP_EXPR
6fce44af 8978 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
8979 offset = TREE_OPERAND (offset, 0);
8980
8981 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8982 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8983 if (TREE_CODE (offset) != BIT_AND_EXPR
8984 || !host_integerp (TREE_OPERAND (offset, 1), 1)
c0cfc691
OH
8985 || compare_tree_int (TREE_OPERAND (offset, 1),
8986 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
8987 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8988 return 0;
8989
8990 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8991 It must be NEGATE_EXPR. Then strip any more conversions. */
8992 offset = TREE_OPERAND (offset, 0);
8993 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8994 || TREE_CODE (offset) == NOP_EXPR
8995 || TREE_CODE (offset) == CONVERT_EXPR)
8996 offset = TREE_OPERAND (offset, 0);
8997
8998 if (TREE_CODE (offset) != NEGATE_EXPR)
8999 return 0;
9000
9001 offset = TREE_OPERAND (offset, 0);
9002 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9003 || TREE_CODE (offset) == NOP_EXPR
9004 || TREE_CODE (offset) == CONVERT_EXPR)
9005 offset = TREE_OPERAND (offset, 0);
9006
6fce44af
RK
9007 /* This must now be the address of EXP. */
9008 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
9009}
9010\f
e0a2f705 9011/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 9012 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9013 in bytes within the string that ARG is accessing. The type of the
9014 offset will be `sizetype'. */
b93a436e 9015
28f4ec01 9016tree
502b8322 9017string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9018{
9019 STRIP_NOPS (arg);
9020
9021 if (TREE_CODE (arg) == ADDR_EXPR
9022 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9023 {
fed3cef0 9024 *ptr_offset = size_zero_node;
b93a436e
JL
9025 return TREE_OPERAND (arg, 0);
9026 }
6de9cd9a
DN
9027 if (TREE_CODE (arg) == ADDR_EXPR
9028 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9030 {
9031 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9032 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9033 }
b93a436e
JL
9034 else if (TREE_CODE (arg) == PLUS_EXPR)
9035 {
9036 tree arg0 = TREE_OPERAND (arg, 0);
9037 tree arg1 = TREE_OPERAND (arg, 1);
9038
9039 STRIP_NOPS (arg0);
9040 STRIP_NOPS (arg1);
9041
9042 if (TREE_CODE (arg0) == ADDR_EXPR
9043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9044 {
fed3cef0 9045 *ptr_offset = convert (sizetype, arg1);
b93a436e 9046 return TREE_OPERAND (arg0, 0);
bbf6f052 9047 }
b93a436e
JL
9048 else if (TREE_CODE (arg1) == ADDR_EXPR
9049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9050 {
fed3cef0 9051 *ptr_offset = convert (sizetype, arg0);
b93a436e 9052 return TREE_OPERAND (arg1, 0);
bbf6f052 9053 }
b93a436e 9054 }
ca695ac9 9055
b93a436e
JL
9056 return 0;
9057}
ca695ac9 9058\f
b93a436e
JL
9059/* Expand code for a post- or pre- increment or decrement
9060 and return the RTX for the result.
9061 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9062
b93a436e 9063static rtx
502b8322 9064expand_increment (tree exp, int post, int ignore)
ca695ac9 9065{
b3694847
SS
9066 rtx op0, op1;
9067 rtx temp, value;
9068 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9069 optab this_optab = add_optab;
9070 int icode;
9071 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9072 int op0_is_copy = 0;
9073 int single_insn = 0;
9074 /* 1 means we can't store into OP0 directly,
9075 because it is a subreg narrower than a word,
9076 and we don't dare clobber the rest of the word. */
9077 int bad_subreg = 0;
1499e0a8 9078
b93a436e
JL
9079 /* Stabilize any component ref that might need to be
9080 evaluated more than once below. */
9081 if (!post
9082 || TREE_CODE (incremented) == BIT_FIELD_REF
9083 || (TREE_CODE (incremented) == COMPONENT_REF
9084 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9085 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9086 incremented = stabilize_reference (incremented);
9087 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9088 ones into save exprs so that they don't accidentally get evaluated
9089 more than once by the code below. */
9090 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9091 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9092 incremented = save_expr (incremented);
e9a25f70 9093
b93a436e
JL
9094 /* Compute the operands as RTX.
9095 Note whether OP0 is the actual lvalue or a copy of it:
9096 I believe it is a copy iff it is a register or subreg
6d2f8887 9097 and insns were generated in computing it. */
e9a25f70 9098
b93a436e 9099 temp = get_last_insn ();
37a08a29 9100 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9101
b93a436e
JL
9102 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9103 in place but instead must do sign- or zero-extension during assignment,
9104 so we copy it into a new register and let the code below use it as
9105 a copy.
e9a25f70 9106
b93a436e
JL
9107 Note that we can safely modify this SUBREG since it is know not to be
9108 shared (it was made by the expand_expr call above). */
9109
9110 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9111 {
9112 if (post)
9113 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9114 else
9115 bad_subreg = 1;
9116 }
9117 else if (GET_CODE (op0) == SUBREG
9118 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9119 {
9120 /* We cannot increment this SUBREG in place. If we are
9121 post-incrementing, get a copy of the old value. Otherwise,
9122 just mark that we cannot increment in place. */
9123 if (post)
9124 op0 = copy_to_reg (op0);
9125 else
9126 bad_subreg = 1;
e9a25f70
JL
9127 }
9128
f8cfc6aa 9129 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
b93a436e 9130 && temp != get_last_insn ());
37a08a29 9131 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9132
b93a436e
JL
9133 /* Decide whether incrementing or decrementing. */
9134 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9135 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9136 this_optab = sub_optab;
9137
9138 /* Convert decrement by a constant into a negative increment. */
9139 if (this_optab == sub_optab
9140 && GET_CODE (op1) == CONST_INT)
ca695ac9 9141 {
3a94c984 9142 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9143 this_optab = add_optab;
ca695ac9 9144 }
1499e0a8 9145
91ce572a 9146 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9147 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9148
b93a436e
JL
9149 /* For a preincrement, see if we can do this with a single instruction. */
9150 if (!post)
9151 {
9152 icode = (int) this_optab->handlers[(int) mode].insn_code;
9153 if (icode != (int) CODE_FOR_nothing
9154 /* Make sure that OP0 is valid for operands 0 and 1
9155 of the insn we want to queue. */
a995e389
RH
9156 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9157 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9158 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9159 single_insn = 1;
9160 }
bbf6f052 9161
b93a436e
JL
9162 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9163 then we cannot just increment OP0. We must therefore contrive to
9164 increment the original value. Then, for postincrement, we can return
9165 OP0 since it is a copy of the old value. For preincrement, expand here
9166 unless we can do it with a single insn.
bbf6f052 9167
b93a436e
JL
9168 Likewise if storing directly into OP0 would clobber high bits
9169 we need to preserve (bad_subreg). */
9170 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9171 {
b93a436e
JL
9172 /* This is the easiest way to increment the value wherever it is.
9173 Problems with multiple evaluation of INCREMENTED are prevented
9174 because either (1) it is a component_ref or preincrement,
9175 in which case it was stabilized above, or (2) it is an array_ref
9176 with constant index in an array in a register, which is
9177 safe to reevaluate. */
9178 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9179 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9180 ? MINUS_EXPR : PLUS_EXPR),
9181 TREE_TYPE (exp),
9182 incremented,
9183 TREE_OPERAND (exp, 1));
a358cee0 9184
b93a436e
JL
9185 while (TREE_CODE (incremented) == NOP_EXPR
9186 || TREE_CODE (incremented) == CONVERT_EXPR)
9187 {
9188 newexp = convert (TREE_TYPE (incremented), newexp);
9189 incremented = TREE_OPERAND (incremented, 0);
9190 }
bbf6f052 9191
b90f141a 9192 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b93a436e
JL
9193 return post ? op0 : temp;
9194 }
bbf6f052 9195
b93a436e
JL
9196 if (post)
9197 {
9198 /* We have a true reference to the value in OP0.
9199 If there is an insn to add or subtract in this mode, queue it.
d91edf86 9200 Queuing the increment insn avoids the register shuffling
b93a436e
JL
9201 that often results if we must increment now and first save
9202 the old value for subsequent use. */
bbf6f052 9203
b93a436e
JL
9204#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9205 op0 = stabilize (op0);
9206#endif
41dfd40c 9207
b93a436e
JL
9208 icode = (int) this_optab->handlers[(int) mode].insn_code;
9209 if (icode != (int) CODE_FOR_nothing
9210 /* Make sure that OP0 is valid for operands 0 and 1
9211 of the insn we want to queue. */
a995e389
RH
9212 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9213 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9214 {
a995e389 9215 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9216 op1 = force_reg (mode, op1);
bbf6f052 9217
b93a436e
JL
9218 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9219 }
3c0cb5de 9220 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
b93a436e
JL
9221 {
9222 rtx addr = (general_operand (XEXP (op0, 0), mode)
9223 ? force_reg (Pmode, XEXP (op0, 0))
9224 : copy_to_reg (XEXP (op0, 0)));
9225 rtx temp, result;
ca695ac9 9226
792760b9 9227 op0 = replace_equiv_address (op0, addr);
b93a436e 9228 temp = force_reg (GET_MODE (op0), op0);
a995e389 9229 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9230 op1 = force_reg (mode, op1);
ca695ac9 9231
b93a436e
JL
9232 /* The increment queue is LIFO, thus we have to `queue'
9233 the instructions in reverse order. */
9234 enqueue_insn (op0, gen_move_insn (op0, temp));
9235 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9236 return result;
bbf6f052
RK
9237 }
9238 }
ca695ac9 9239
b93a436e
JL
9240 /* Preincrement, or we can't increment with one simple insn. */
9241 if (post)
9242 /* Save a copy of the value before inc or dec, to return it later. */
9243 temp = value = copy_to_reg (op0);
9244 else
9245 /* Arrange to return the incremented value. */
9246 /* Copy the rtx because expand_binop will protect from the queue,
9247 and the results of that would be invalid for us to return
9248 if our caller does emit_queue before using our result. */
9249 temp = copy_rtx (value = op0);
bbf6f052 9250
b93a436e 9251 /* Increment however we can. */
37a08a29 9252 op1 = expand_binop (mode, this_optab, value, op1, op0,
8df83eae 9253 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9254
b93a436e
JL
9255 /* Make sure the value is stored into OP0. */
9256 if (op1 != op0)
9257 emit_move_insn (op0, op1);
5718612f 9258
b93a436e
JL
9259 return temp;
9260}
9261\f
b93a436e
JL
9262/* Generate code to calculate EXP using a store-flag instruction
9263 and return an rtx for the result. EXP is either a comparison
9264 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9265
b93a436e 9266 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9267
cc2902df 9268 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9269 cheap.
ca695ac9 9270
b93a436e
JL
9271 Return zero if there is no suitable set-flag instruction
9272 available on this machine.
ca695ac9 9273
b93a436e
JL
9274 Once expand_expr has been called on the arguments of the comparison,
9275 we are committed to doing the store flag, since it is not safe to
9276 re-evaluate the expression. We emit the store-flag insn by calling
9277 emit_store_flag, but only expand the arguments if we have a reason
9278 to believe that emit_store_flag will be successful. If we think that
9279 it will, but it isn't, we have to simulate the store-flag with a
9280 set/jump/set sequence. */
ca695ac9 9281
b93a436e 9282static rtx
502b8322 9283do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9284{
9285 enum rtx_code code;
9286 tree arg0, arg1, type;
9287 tree tem;
9288 enum machine_mode operand_mode;
9289 int invert = 0;
9290 int unsignedp;
9291 rtx op0, op1;
9292 enum insn_code icode;
9293 rtx subtarget = target;
381127e8 9294 rtx result, label;
ca695ac9 9295
b93a436e
JL
9296 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9297 result at the end. We can't simply invert the test since it would
9298 have already been inverted if it were valid. This case occurs for
9299 some floating-point comparisons. */
ca695ac9 9300
b93a436e
JL
9301 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9302 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9303
b93a436e
JL
9304 arg0 = TREE_OPERAND (exp, 0);
9305 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9306
9307 /* Don't crash if the comparison was erroneous. */
9308 if (arg0 == error_mark_node || arg1 == error_mark_node)
9309 return const0_rtx;
9310
b93a436e
JL
9311 type = TREE_TYPE (arg0);
9312 operand_mode = TYPE_MODE (type);
8df83eae 9313 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 9314
b93a436e
JL
9315 /* We won't bother with BLKmode store-flag operations because it would mean
9316 passing a lot of information to emit_store_flag. */
9317 if (operand_mode == BLKmode)
9318 return 0;
ca695ac9 9319
b93a436e
JL
9320 /* We won't bother with store-flag operations involving function pointers
9321 when function pointers must be canonicalized before comparisons. */
9322#ifdef HAVE_canonicalize_funcptr_for_compare
9323 if (HAVE_canonicalize_funcptr_for_compare
9324 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9325 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9326 == FUNCTION_TYPE))
9327 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9328 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9329 == FUNCTION_TYPE))))
9330 return 0;
ca695ac9
JB
9331#endif
9332
b93a436e
JL
9333 STRIP_NOPS (arg0);
9334 STRIP_NOPS (arg1);
ca695ac9 9335
b93a436e
JL
9336 /* Get the rtx comparison code to use. We know that EXP is a comparison
9337 operation of some type. Some comparisons against 1 and -1 can be
9338 converted to comparisons with zero. Do so here so that the tests
9339 below will be aware that we have a comparison with zero. These
9340 tests will not catch constants in the first operand, but constants
9341 are rarely passed as the first operand. */
ca695ac9 9342
b93a436e
JL
9343 switch (TREE_CODE (exp))
9344 {
9345 case EQ_EXPR:
9346 code = EQ;
bbf6f052 9347 break;
b93a436e
JL
9348 case NE_EXPR:
9349 code = NE;
bbf6f052 9350 break;
b93a436e
JL
9351 case LT_EXPR:
9352 if (integer_onep (arg1))
9353 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9354 else
9355 code = unsignedp ? LTU : LT;
ca695ac9 9356 break;
b93a436e
JL
9357 case LE_EXPR:
9358 if (! unsignedp && integer_all_onesp (arg1))
9359 arg1 = integer_zero_node, code = LT;
9360 else
9361 code = unsignedp ? LEU : LE;
ca695ac9 9362 break;
b93a436e
JL
9363 case GT_EXPR:
9364 if (! unsignedp && integer_all_onesp (arg1))
9365 arg1 = integer_zero_node, code = GE;
9366 else
9367 code = unsignedp ? GTU : GT;
9368 break;
9369 case GE_EXPR:
9370 if (integer_onep (arg1))
9371 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9372 else
9373 code = unsignedp ? GEU : GE;
ca695ac9 9374 break;
1eb8759b
RH
9375
9376 case UNORDERED_EXPR:
9377 code = UNORDERED;
9378 break;
9379 case ORDERED_EXPR:
9380 code = ORDERED;
9381 break;
9382 case UNLT_EXPR:
9383 code = UNLT;
9384 break;
9385 case UNLE_EXPR:
9386 code = UNLE;
9387 break;
9388 case UNGT_EXPR:
9389 code = UNGT;
9390 break;
9391 case UNGE_EXPR:
9392 code = UNGE;
9393 break;
9394 case UNEQ_EXPR:
9395 code = UNEQ;
9396 break;
d1a7edaf
PB
9397 case LTGT_EXPR:
9398 code = LTGT;
9399 break;
1eb8759b 9400
ca695ac9 9401 default:
b93a436e 9402 abort ();
bbf6f052 9403 }
bbf6f052 9404
b93a436e
JL
9405 /* Put a constant second. */
9406 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9407 {
9408 tem = arg0; arg0 = arg1; arg1 = tem;
9409 code = swap_condition (code);
ca695ac9 9410 }
bbf6f052 9411
b93a436e
JL
9412 /* If this is an equality or inequality test of a single bit, we can
9413 do this by shifting the bit being tested to the low-order bit and
9414 masking the result with the constant 1. If the condition was EQ,
9415 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9416 than an scc insn even if we have it.
9417
9418 The code to make this transformation was moved into fold_single_bit_test,
9419 so we just call into the folder and expand its result. */
d39985fa 9420
b93a436e
JL
9421 if ((code == NE || code == EQ)
9422 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9423 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 9424 {
ae2bcd98 9425 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 9426 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9427 arg0, arg1, type),
60cd4dae
JL
9428 target, VOIDmode, EXPAND_NORMAL);
9429 }
bbf6f052 9430
b93a436e 9431 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9432 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9433 return 0;
1eb8759b 9434
b93a436e
JL
9435 icode = setcc_gen_code[(int) code];
9436 if (icode == CODE_FOR_nothing
a995e389 9437 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9438 {
b93a436e
JL
9439 /* We can only do this if it is one of the special cases that
9440 can be handled without an scc insn. */
9441 if ((code == LT && integer_zerop (arg1))
9442 || (! only_cheap && code == GE && integer_zerop (arg1)))
9443 ;
9444 else if (BRANCH_COST >= 0
9445 && ! only_cheap && (code == NE || code == EQ)
9446 && TREE_CODE (type) != REAL_TYPE
9447 && ((abs_optab->handlers[(int) operand_mode].insn_code
9448 != CODE_FOR_nothing)
9449 || (ffs_optab->handlers[(int) operand_mode].insn_code
9450 != CODE_FOR_nothing)))
9451 ;
9452 else
9453 return 0;
ca695ac9 9454 }
3a94c984 9455
296b4ed9 9456 if (! get_subtarget (target)
e3be1116 9457 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9458 subtarget = 0;
9459
eb698c58 9460 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9461
9462 if (target == 0)
9463 target = gen_reg_rtx (mode);
9464
9465 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9466 because, if the emit_store_flag does anything it will succeed and
9467 OP0 and OP1 will not be used subsequently. */
ca695ac9 9468
b93a436e
JL
9469 result = emit_store_flag (target, code,
9470 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9471 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9472 operand_mode, unsignedp, 1);
ca695ac9 9473
b93a436e
JL
9474 if (result)
9475 {
9476 if (invert)
9477 result = expand_binop (mode, xor_optab, result, const1_rtx,
9478 result, 0, OPTAB_LIB_WIDEN);
9479 return result;
ca695ac9 9480 }
bbf6f052 9481
b93a436e 9482 /* If this failed, we have to do this with set/compare/jump/set code. */
f8cfc6aa 9483 if (!REG_P (target)
b93a436e
JL
9484 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9485 target = gen_reg_rtx (GET_MODE (target));
9486
9487 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9488 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9489 operand_mode, NULL_RTX);
b93a436e
JL
9490 if (GET_CODE (result) == CONST_INT)
9491 return (((result == const0_rtx && ! invert)
9492 || (result != const0_rtx && invert))
9493 ? const0_rtx : const1_rtx);
ca695ac9 9494
8f08e8c0
JL
9495 /* The code of RESULT may not match CODE if compare_from_rtx
9496 decided to swap its operands and reverse the original code.
9497
9498 We know that compare_from_rtx returns either a CONST_INT or
9499 a new comparison code, so it is safe to just extract the
9500 code from RESULT. */
9501 code = GET_CODE (result);
9502
b93a436e
JL
9503 label = gen_label_rtx ();
9504 if (bcc_gen_fctn[(int) code] == 0)
9505 abort ();
0f41302f 9506
b93a436e
JL
9507 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9508 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9509 emit_label (label);
bbf6f052 9510
b93a436e 9511 return target;
ca695ac9 9512}
b93a436e 9513\f
b93a436e 9514
ad82abb8
ZW
9515/* Stubs in case we haven't got a casesi insn. */
9516#ifndef HAVE_casesi
9517# define HAVE_casesi 0
9518# define gen_casesi(a, b, c, d, e) (0)
9519# define CODE_FOR_casesi CODE_FOR_nothing
9520#endif
9521
9522/* If the machine does not have a case insn that compares the bounds,
9523 this means extra overhead for dispatch tables, which raises the
9524 threshold for using them. */
9525#ifndef CASE_VALUES_THRESHOLD
9526#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9527#endif /* CASE_VALUES_THRESHOLD */
9528
9529unsigned int
502b8322 9530case_values_threshold (void)
ad82abb8
ZW
9531{
9532 return CASE_VALUES_THRESHOLD;
9533}
9534
9535/* Attempt to generate a casesi instruction. Returns 1 if successful,
9536 0 otherwise (i.e. if there is no casesi instruction). */
9537int
502b8322
AJ
9538try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9539 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9540{
9541 enum machine_mode index_mode = SImode;
9542 int index_bits = GET_MODE_BITSIZE (index_mode);
9543 rtx op1, op2, index;
9544 enum machine_mode op_mode;
9545
9546 if (! HAVE_casesi)
9547 return 0;
9548
9549 /* Convert the index to SImode. */
9550 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9551 {
9552 enum machine_mode omode = TYPE_MODE (index_type);
9553 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9554
9555 /* We must handle the endpoints in the original mode. */
9556 index_expr = build (MINUS_EXPR, index_type,
9557 index_expr, minval);
9558 minval = integer_zero_node;
9559 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9560 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9561 omode, 1, default_label);
ad82abb8
ZW
9562 /* Now we can safely truncate. */
9563 index = convert_to_mode (index_mode, index, 0);
9564 }
9565 else
9566 {
9567 if (TYPE_MODE (index_type) != index_mode)
9568 {
ae2bcd98 9569 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 9570 (index_bits, 0), index_expr);
ad82abb8
ZW
9571 index_type = TREE_TYPE (index_expr);
9572 }
9573
9574 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9575 }
9576 emit_queue ();
9577 index = protect_from_queue (index, 0);
9578 do_pending_stack_adjust ();
9579
9580 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9581 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9582 (index, op_mode))
9583 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9584
ad82abb8
ZW
9585 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9586
9587 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9588 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 9589 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
9590 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9591 (op1, op_mode))
9592 op1 = copy_to_mode_reg (op_mode, op1);
9593
9594 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9595
9596 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9597 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 9598 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
9599 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9600 (op2, op_mode))
9601 op2 = copy_to_mode_reg (op_mode, op2);
9602
9603 emit_jump_insn (gen_casesi (index, op1, op2,
9604 table_label, default_label));
9605 return 1;
9606}
9607
9608/* Attempt to generate a tablejump instruction; same concept. */
9609#ifndef HAVE_tablejump
9610#define HAVE_tablejump 0
9611#define gen_tablejump(x, y) (0)
9612#endif
9613
9614/* Subroutine of the next function.
9615
9616 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9617 in the table already subtracted.
9618 MODE is its expected mode (needed if INDEX is constant).
9619 RANGE is the length of the jump table.
9620 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9621
b93a436e
JL
9622 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9623 index value is out of range. */
0f41302f 9624
ad82abb8 9625static void
502b8322
AJ
9626do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9627 rtx default_label)
ca695ac9 9628{
b3694847 9629 rtx temp, vector;
88d3b7f0 9630
74f6d071
JH
9631 if (INTVAL (range) > cfun->max_jumptable_ents)
9632 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9633
b93a436e
JL
9634 /* Do an unsigned comparison (in the proper mode) between the index
9635 expression and the value which represents the length of the range.
9636 Since we just finished subtracting the lower bound of the range
9637 from the index expression, this comparison allows us to simultaneously
9638 check that the original index expression value is both greater than
9639 or equal to the minimum value of the range and less than or equal to
9640 the maximum value of the range. */
709f5be1 9641
c5d5d461 9642 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9643 default_label);
bbf6f052 9644
b93a436e
JL
9645 /* If index is in range, it must fit in Pmode.
9646 Convert to Pmode so we can index with it. */
9647 if (mode != Pmode)
9648 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9649
ba228239 9650 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
9651 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9652 and break_out_memory_refs will go to work on it and mess it up. */
9653#ifdef PIC_CASE_VECTOR_ADDRESS
f8cfc6aa 9654 if (flag_pic && !REG_P (index))
b93a436e
JL
9655 index = copy_to_mode_reg (Pmode, index);
9656#endif
ca695ac9 9657
b93a436e
JL
9658 /* If flag_force_addr were to affect this address
9659 it could interfere with the tricky assumptions made
9660 about addresses that contain label-refs,
9661 which may be valid only very near the tablejump itself. */
9662 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9663 GET_MODE_SIZE, because this indicates how large insns are. The other
9664 uses should all be Pmode, because they are addresses. This code
9665 could fail if addresses and insns are not the same size. */
9666 index = gen_rtx_PLUS (Pmode,
9667 gen_rtx_MULT (Pmode, index,
9668 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9669 gen_rtx_LABEL_REF (Pmode, table_label));
9670#ifdef PIC_CASE_VECTOR_ADDRESS
9671 if (flag_pic)
9672 index = PIC_CASE_VECTOR_ADDRESS (index);
9673 else
bbf6f052 9674#endif
b93a436e
JL
9675 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9676 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9677 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9678 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 9679 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
9680 convert_move (temp, vector, 0);
9681
9682 emit_jump_insn (gen_tablejump (temp, table_label));
9683
9684 /* If we are generating PIC code or if the table is PC-relative, the
9685 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9686 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9687 emit_barrier ();
bbf6f052 9688}
b93a436e 9689
ad82abb8 9690int
502b8322
AJ
9691try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9692 rtx table_label, rtx default_label)
ad82abb8
ZW
9693{
9694 rtx index;
9695
9696 if (! HAVE_tablejump)
9697 return 0;
9698
9699 index_expr = fold (build (MINUS_EXPR, index_type,
9700 convert (index_type, index_expr),
9701 convert (index_type, minval)));
9702 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9703 emit_queue ();
9704 index = protect_from_queue (index, 0);
9705 do_pending_stack_adjust ();
9706
9707 do_tablejump (index, TYPE_MODE (index_type),
9708 convert_modes (TYPE_MODE (index_type),
9709 TYPE_MODE (TREE_TYPE (range)),
9710 expand_expr (range, NULL_RTX,
9711 VOIDmode, 0),
8df83eae 9712 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9713 table_label, default_label);
9714 return 1;
9715}
e2500fed 9716
cb2a532e
AH
9717/* Nonzero if the mode is a valid vector mode for this architecture.
9718 This returns nonzero even if there is no hardware support for the
9719 vector mode, but we can emulate with narrower modes. */
9720
9721int
502b8322 9722vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9723{
9724 enum mode_class class = GET_MODE_CLASS (mode);
9725 enum machine_mode innermode;
9726
9727 /* Doh! What's going on? */
9728 if (class != MODE_VECTOR_INT
9729 && class != MODE_VECTOR_FLOAT)
9730 return 0;
9731
9732 /* Hardware support. Woo hoo! */
9733 if (VECTOR_MODE_SUPPORTED_P (mode))
9734 return 1;
9735
9736 innermode = GET_MODE_INNER (mode);
9737
9738 /* We should probably return 1 if requesting V4DI and we have no DI,
9739 but we have V2DI, but this is probably very unlikely. */
9740
9741 /* If we have support for the inner mode, we can safely emulate it.
9742 We may not have V2DI, but me can emulate with a pair of DIs. */
9743 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9744}
9745
d744e06e
AH
9746/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9747static rtx
502b8322 9748const_vector_from_tree (tree exp)
d744e06e
AH
9749{
9750 rtvec v;
9751 int units, i;
9752 tree link, elt;
9753 enum machine_mode inner, mode;
9754
9755 mode = TYPE_MODE (TREE_TYPE (exp));
9756
6de9cd9a 9757 if (initializer_zerop (exp))
d744e06e
AH
9758 return CONST0_RTX (mode);
9759
9760 units = GET_MODE_NUNITS (mode);
9761 inner = GET_MODE_INNER (mode);
9762
9763 v = rtvec_alloc (units);
9764
9765 link = TREE_VECTOR_CST_ELTS (exp);
9766 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9767 {
9768 elt = TREE_VALUE (link);
9769
9770 if (TREE_CODE (elt) == REAL_CST)
9771 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9772 inner);
9773 else
9774 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9775 TREE_INT_CST_HIGH (elt),
9776 inner);
9777 }
9778
5f6c070d
AH
9779 /* Initialize remaining elements to 0. */
9780 for (; i < units; ++i)
9781 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9782
d744e06e
AH
9783 return gen_rtx_raw_CONST_VECTOR (mode, v);
9784}
e2500fed 9785#include "gt-expr.h"