/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
3ef1eef4 1481
4ca79136
RH
1482/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1483 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1484
4ca79136 1485static rtx
502b8322 1486emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1487{
ee960939 1488 rtx dst_addr, src_addr;
4ca79136
RH
1489 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1490 enum machine_mode size_mode;
1491 rtx retval;
4bc973ae 1492
4ca79136 1493 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1494
ee960939
OH
1495 It is unsafe to save the value generated by protect_from_queue and reuse
1496 it later. Consider what happens if emit_queue is called before the
1497 return value from protect_from_queue is used.
4bc973ae 1498
ee960939
OH
1499 Expansion of the CALL_EXPR below will call emit_queue before we are
1500 finished emitting RTL for argument setup. So if we are not careful we
1501 could get the wrong value for an argument.
4bc973ae 1502
ee960939
OH
1503 To avoid this problem we go ahead and emit code to copy the addresses of
1504 DST and SRC and SIZE into new pseudos. We can then place those new
1505 pseudos into an RTL_EXPR and use them later, even after a call to
4ca79136 1506 emit_queue.
4bc973ae 1507
ee960939
OH
1508 Note this is not strictly needed for library calls since they do not call
1509 emit_queue before loading their arguments. However, we may need to have
1510 library calls call emit_queue in the future since failing to do so could
1511 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1512 arguments in registers. */
1513
1514 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1515 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1516
ee960939
OH
1517 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1518 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1519
1520 dst_tree = make_tree (ptr_type_node, dst_addr);
1521 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136
RH
1522
1523 if (TARGET_MEM_FUNCTIONS)
1524 size_mode = TYPE_MODE (sizetype);
1525 else
1526 size_mode = TYPE_MODE (unsigned_type_node);
ee960939 1527
4ca79136
RH
1528 size = convert_to_mode (size_mode, size, 1);
1529 size = copy_to_mode_reg (size_mode, size);
1530
1531 /* It is incorrect to use the libcall calling conventions to call
1532 memcpy in this context. This could be a user call to memcpy and
1533 the user may wish to examine the return value from memcpy. For
1534 targets where libcalls and normal calls have different conventions
1535 for returning pointers, we could end up generating incorrect code.
1536
1537 For convenience, we generate the call to bcopy this way as well. */
1538
4ca79136
RH
1539 if (TARGET_MEM_FUNCTIONS)
1540 size_tree = make_tree (sizetype, size);
1541 else
1542 size_tree = make_tree (unsigned_type_node, size);
1543
1544 fn = emit_block_move_libcall_fn (true);
1545 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1546 if (TARGET_MEM_FUNCTIONS)
1547 {
1548 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1549 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1550 }
1551 else
1552 {
1553 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1554 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1555 }
1556
1557 /* Now we have to build up the CALL_EXPR itself. */
1558 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1559 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1560 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1561
1562 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1563
ee960939
OH
1564 /* If we are initializing a readonly value, show the above call clobbered
1565 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1566 the delay slot scheduler might overlook conflicts and take nasty
1567 decisions. */
4ca79136 1568 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1569 add_function_usage_to
1570 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1571 gen_rtx_CLOBBER (VOIDmode, dst),
1572 NULL_RTX));
4ca79136 1573
ee960939 1574 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
4ca79136 1575}
52cf7115 1576
4ca79136
RH
1577/* A subroutine of emit_block_move_via_libcall. Create the tree node
1578 for the function we use for block copies. The first time FOR_CALL
1579 is true, we call assemble_external. */
52cf7115 1580
4ca79136
RH
1581static GTY(()) tree block_move_fn;
1582
9661b15f 1583void
502b8322 1584init_block_move_fn (const char *asmspec)
4ca79136 1585{
9661b15f 1586 if (!block_move_fn)
4ca79136 1587 {
8fd3cf4e 1588 tree args, fn;
9661b15f 1589
4ca79136 1590 if (TARGET_MEM_FUNCTIONS)
52cf7115 1591 {
4ca79136
RH
1592 fn = get_identifier ("memcpy");
1593 args = build_function_type_list (ptr_type_node, ptr_type_node,
1594 const_ptr_type_node, sizetype,
1595 NULL_TREE);
1596 }
1597 else
1598 {
1599 fn = get_identifier ("bcopy");
1600 args = build_function_type_list (void_type_node, const_ptr_type_node,
1601 ptr_type_node, unsigned_type_node,
1602 NULL_TREE);
52cf7115
JL
1603 }
1604
4ca79136
RH
1605 fn = build_decl (FUNCTION_DECL, fn, args);
1606 DECL_EXTERNAL (fn) = 1;
1607 TREE_PUBLIC (fn) = 1;
1608 DECL_ARTIFICIAL (fn) = 1;
1609 TREE_NOTHROW (fn) = 1;
66c60e67 1610
4ca79136 1611 block_move_fn = fn;
bbf6f052 1612 }
e9a25f70 1613
9661b15f
JJ
1614 if (asmspec)
1615 {
1616 SET_DECL_RTL (block_move_fn, NULL_RTX);
1617 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1618 }
1619}
1620
1621static tree
502b8322 1622emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1623{
1624 static bool emitted_extern;
1625
1626 if (!block_move_fn)
1627 init_block_move_fn (NULL);
1628
4ca79136
RH
1629 if (for_call && !emitted_extern)
1630 {
1631 emitted_extern = true;
9661b15f
JJ
1632 make_decl_rtl (block_move_fn, NULL);
1633 assemble_external (block_move_fn);
4ca79136
RH
1634 }
1635
9661b15f 1636 return block_move_fn;
bbf6f052 1637}
44bb111a
RH
1638
1639/* A subroutine of emit_block_move. Copy the data via an explicit
1640 loop. This is used only when libcalls are forbidden. */
1641/* ??? It'd be nice to copy in hunks larger than QImode. */
1642
1643static void
502b8322
AJ
1644emit_block_move_via_loop (rtx x, rtx y, rtx size,
1645 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1646{
1647 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1648 enum machine_mode iter_mode;
1649
1650 iter_mode = GET_MODE (size);
1651 if (iter_mode == VOIDmode)
1652 iter_mode = word_mode;
1653
1654 top_label = gen_label_rtx ();
1655 cmp_label = gen_label_rtx ();
1656 iter = gen_reg_rtx (iter_mode);
1657
1658 emit_move_insn (iter, const0_rtx);
1659
1660 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1661 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1662 do_pending_stack_adjust ();
1663
2e040219 1664 emit_note (NOTE_INSN_LOOP_BEG);
44bb111a
RH
1665
1666 emit_jump (cmp_label);
1667 emit_label (top_label);
1668
1669 tmp = convert_modes (Pmode, iter_mode, iter, true);
1670 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1671 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1672 x = change_address (x, QImode, x_addr);
1673 y = change_address (y, QImode, y_addr);
1674
1675 emit_move_insn (x, y);
1676
1677 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1678 true, OPTAB_LIB_WIDEN);
1679 if (tmp != iter)
1680 emit_move_insn (iter, tmp);
1681
2e040219 1682 emit_note (NOTE_INSN_LOOP_CONT);
44bb111a
RH
1683 emit_label (cmp_label);
1684
1685 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1686 true, top_label);
1687
2e040219 1688 emit_note (NOTE_INSN_LOOP_END);
44bb111a 1689}
bbf6f052
RK
1690\f
1691/* Copy all or part of a value X into registers starting at REGNO.
1692 The number of registers to be filled is NREGS. */
1693
1694void
502b8322 1695move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1696{
1697 int i;
381127e8 1698#ifdef HAVE_load_multiple
3a94c984 1699 rtx pat;
381127e8
RL
1700 rtx last;
1701#endif
bbf6f052 1702
72bb9717
RK
1703 if (nregs == 0)
1704 return;
1705
bbf6f052
RK
1706 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1707 x = validize_mem (force_const_mem (mode, x));
1708
1709 /* See if the machine can do this with a load multiple insn. */
1710#ifdef HAVE_load_multiple
c3a02afe 1711 if (HAVE_load_multiple)
bbf6f052 1712 {
c3a02afe 1713 last = get_last_insn ();
38a448ca 1714 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1715 GEN_INT (nregs));
1716 if (pat)
1717 {
1718 emit_insn (pat);
1719 return;
1720 }
1721 else
1722 delete_insns_since (last);
bbf6f052 1723 }
bbf6f052
RK
1724#endif
1725
1726 for (i = 0; i < nregs; i++)
38a448ca 1727 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1728 operand_subword_force (x, i, mode));
1729}
1730
1731/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1732 The number of registers to be filled is NREGS. */
0040593d 1733
bbf6f052 1734void
502b8322 1735move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1736{
1737 int i;
bbf6f052 1738
2954d7db
RK
1739 if (nregs == 0)
1740 return;
1741
bbf6f052
RK
1742 /* See if the machine can do this with a store multiple insn. */
1743#ifdef HAVE_store_multiple
c3a02afe 1744 if (HAVE_store_multiple)
bbf6f052 1745 {
c6b97fac
AM
1746 rtx last = get_last_insn ();
1747 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1748 GEN_INT (nregs));
c3a02afe
RK
1749 if (pat)
1750 {
1751 emit_insn (pat);
1752 return;
1753 }
1754 else
1755 delete_insns_since (last);
bbf6f052 1756 }
bbf6f052
RK
1757#endif
1758
1759 for (i = 0; i < nregs; i++)
1760 {
1761 rtx tem = operand_subword (x, i, 1, BLKmode);
1762
1763 if (tem == 0)
1764 abort ();
1765
38a448ca 1766 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1767 }
1768}
1769
084a1106
JDA
1770/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1771 ORIG, where ORIG is a non-consecutive group of registers represented by
1772 a PARALLEL. The clone is identical to the original except that the
1773 original set of registers is replaced by a new set of pseudo registers.
1774 The new set has the same modes as the original set. */
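/* As an illustration (not taken from the original sources), such a group
   might look like

	(parallel [(expr_list (reg:SI 100) (const_int 0))
		   (expr_list (reg:SI 101) (const_int 4))])

   where each EXPR_LIST pairs a register with its byte offset within the
   value; a null first entry means part of the value also lives on the
   stack.  */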
1775
1776rtx
502b8322 1777gen_group_rtx (rtx orig)
084a1106
JDA
1778{
1779 int i, length;
1780 rtx *tmps;
1781
1782 if (GET_CODE (orig) != PARALLEL)
1783 abort ();
1784
1785 length = XVECLEN (orig, 0);
703ad42b 1786 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1787
1788 /* Skip a NULL entry in first slot. */
1789 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1790
1791 if (i)
1792 tmps[0] = 0;
1793
1794 for (; i < length; i++)
1795 {
1796 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1797 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1798
1799 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1800 }
1801
1802 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1803}
1804
6e985040
AM
1805/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1806 where DST is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1808 if not known. */
fffa9c1d
JW
1809
1810void
6e985040 1811emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1812{
aac5cc16
RH
1813 rtx *tmps, src;
1814 int start, i;
fffa9c1d 1815
aac5cc16 1816 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1817 abort ();
1818
1819 /* Check for a NULL entry, used to indicate that the parameter goes
1820 both on the stack and in registers. */
aac5cc16
RH
1821 if (XEXP (XVECEXP (dst, 0, 0), 0))
1822 start = 0;
fffa9c1d 1823 else
aac5cc16
RH
1824 start = 1;
1825
703ad42b 1826 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1827
aac5cc16
RH
1828 /* Process the pieces. */
1829 for (i = start; i < XVECLEN (dst, 0); i++)
1830 {
1831 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1832 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1833 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1834 int shift = 0;
1835
1836 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1837 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1838 {
6e985040
AM
1839 /* Arrange to shift the fragment to where it belongs.
1840 extract_bit_field loads to the lsb of the reg. */
1841 if (
1842#ifdef BLOCK_REG_PADDING
1843 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1844 == (BYTES_BIG_ENDIAN ? upward : downward)
1845#else
1846 BYTES_BIG_ENDIAN
1847#endif
1848 )
1849 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
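	    /* Worked example (illustrative): with SImode pieces and a
	       6-byte struct, the piece at BYTEPOS 4 has BYTELEN 4 but only
	       2 bytes of data, so SHIFT becomes (4 - (6 - 4)) * 8 = 16 and
	       BYTELEN is reduced to 2 below.  */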
aac5cc16
RH
1850 bytelen = ssize - bytepos;
1851 if (bytelen <= 0)
729a2125 1852 abort ();
aac5cc16
RH
1853 }
1854
f3ce87a9
DE
1855 /* If we won't be loading directly from memory, protect the real source
1856 from strange tricks we might play; but make sure that the source can
1857 be loaded directly into the destination. */
1858 src = orig_src;
1859 if (GET_CODE (orig_src) != MEM
1860 && (!CONSTANT_P (orig_src)
1861 || (GET_MODE (orig_src) != mode
1862 && GET_MODE (orig_src) != VOIDmode)))
1863 {
1864 if (GET_MODE (orig_src) == VOIDmode)
1865 src = gen_reg_rtx (mode);
1866 else
1867 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1868
f3ce87a9
DE
1869 emit_move_insn (src, orig_src);
1870 }
1871
aac5cc16
RH
1872 /* Optimize the access just a bit. */
1873 if (GET_CODE (src) == MEM
6e985040
AM
1874 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1875 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1876 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1877 && bytelen == GET_MODE_SIZE (mode))
1878 {
1879 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1880 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1881 }
7c4a6db0
JW
1882 else if (GET_CODE (src) == CONCAT)
1883 {
015b1ad1
JDA
1884 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1885 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1886
1887 if ((bytepos == 0 && bytelen == slen0)
1888 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1889 {
015b1ad1
JDA
1890 /* The following assumes that the concatenated objects all
1891 have the same size. In this case, a simple calculation
1892 can be used to determine the object and the bit field
1893 to be extracted. */
1894 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1895 if (! CONSTANT_P (tmps[i])
1896 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1897 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1898 (bytepos % slen0) * BITS_PER_UNIT,
1899 1, NULL_RTX, mode, mode, ssize);
cbb92744 1900 }
58f69841
JH
1901 else if (bytepos == 0)
1902 {
015b1ad1 1903 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1904 emit_move_insn (mem, src);
04050c69 1905 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1906 }
7c4a6db0
JW
1907 else
1908 abort ();
1909 }
9c0631a7
AH
1910 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1911 SIMD register, which is currently broken. Until we get GCC
1912 to emit proper RTL for these cases, let's dump to memory. */
1913 else if (VECTOR_MODE_P (GET_MODE (dst))
1914 && GET_CODE (src) == REG)
1915 {
1916 int slen = GET_MODE_SIZE (GET_MODE (src));
1917 rtx mem;
1918
1919 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1920 emit_move_insn (mem, src);
1921 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1922 }
d3a16cbd
FJ
1923 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1924 && XVECLEN (dst, 0) > 1)
1925 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1926 else if (CONSTANT_P (src)
2ee5437b
RH
1927 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1928 tmps[i] = src;
fffa9c1d 1929 else
19caa751
RK
1930 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1931 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1932 mode, mode, ssize);
fffa9c1d 1933
6e985040 1934 if (shift)
19caa751
RK
1935 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1936 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1937 }
19caa751 1938
3a94c984 1939 emit_queue ();
aac5cc16
RH
1940
1941 /* Copy the extracted pieces into the proper (probable) hard regs. */
1942 for (i = start; i < XVECLEN (dst, 0); i++)
1943 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1944}
1945
084a1106
JDA
1946/* Emit code to move a block SRC to block DST, where SRC and DST are
1947 non-consecutive groups of registers, each represented by a PARALLEL. */
1948
1949void
502b8322 1950emit_group_move (rtx dst, rtx src)
084a1106
JDA
1951{
1952 int i;
1953
1954 if (GET_CODE (src) != PARALLEL
1955 || GET_CODE (dst) != PARALLEL
1956 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1957 abort ();
1958
1959 /* Skip first entry if NULL. */
1960 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1961 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1962 XEXP (XVECEXP (src, 0, i), 0));
1963}
1964
6e985040
AM
1965/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1966 where SRC is non-consecutive registers represented by a PARALLEL.
1967 SSIZE represents the total size of block ORIG_DST, or -1 if not
1968 known. */
fffa9c1d
JW
1969
1970void
6e985040 1971emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1972{
aac5cc16
RH
1973 rtx *tmps, dst;
1974 int start, i;
fffa9c1d 1975
aac5cc16 1976 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1977 abort ();
1978
1979 /* Check for a NULL entry, used to indicate that the parameter goes
1980 both on the stack and in registers. */
aac5cc16
RH
1981 if (XEXP (XVECEXP (src, 0, 0), 0))
1982 start = 0;
fffa9c1d 1983 else
aac5cc16
RH
1984 start = 1;
1985
703ad42b 1986 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1987
aac5cc16
RH
1988 /* Copy the (probable) hard regs into pseudos. */
1989 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1990 {
aac5cc16
RH
1991 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1992 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1993 emit_move_insn (tmps[i], reg);
1994 }
3a94c984 1995 emit_queue ();
fffa9c1d 1996
aac5cc16
RH
1997 /* If we won't be storing directly into memory, protect the real destination
1998 from strange tricks we might play. */
1999 dst = orig_dst;
10a9f2be
JW
2000 if (GET_CODE (dst) == PARALLEL)
2001 {
2002 rtx temp;
2003
2004 /* We can get a PARALLEL dst if there is a conditional expression in
2005 a return statement. In that case, the dst and src are the same,
2006 so no action is necessary. */
2007 if (rtx_equal_p (dst, src))
2008 return;
2009
2010 /* It is unclear if we can ever reach here, but we may as well handle
2011 it. Allocate a temporary, and split this into a store/load to/from
2012 the temporary. */
2013
2014 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2015 emit_group_store (temp, src, type, ssize);
2016 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2017 return;
2018 }
75897075 2019 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2020 {
2021 dst = gen_reg_rtx (GET_MODE (orig_dst));
2022 /* Make life a bit easier for combine. */
8ae91fc0 2023 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2024 }
aac5cc16
RH
2025
2026 /* Process the pieces. */
2027 for (i = start; i < XVECLEN (src, 0); i++)
2028 {
770ae6cc 2029 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2030 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2031 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2032 rtx dest = dst;
aac5cc16
RH
2033
2034 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2035 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2036 {
6e985040
AM
2037 /* store_bit_field always takes its value from the lsb.
2038 Move the fragment to the lsb if it's not already there. */
2039 if (
2040#ifdef BLOCK_REG_PADDING
2041 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2042 == (BYTES_BIG_ENDIAN ? upward : downward)
2043#else
2044 BYTES_BIG_ENDIAN
2045#endif
2046 )
aac5cc16
RH
2047 {
2048 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2049 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2050 tmps[i], 0, OPTAB_WIDEN);
2051 }
2052 bytelen = ssize - bytepos;
71bc0330 2053 }
fffa9c1d 2054
6ddae612
JJ
2055 if (GET_CODE (dst) == CONCAT)
2056 {
2057 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2058 dest = XEXP (dst, 0);
2059 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2060 {
2061 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2062 dest = XEXP (dst, 1);
2063 }
0d446150
JH
2064 else if (bytepos == 0 && XVECLEN (src, 0))
2065 {
2066 dest = assign_stack_temp (GET_MODE (dest),
2067 GET_MODE_SIZE (GET_MODE (dest)), 0);
2068 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2069 tmps[i]);
2070 dst = dest;
2071 break;
2072 }
6ddae612
JJ
2073 else
2074 abort ();
2075 }
2076
aac5cc16 2077 /* Optimize the access just a bit. */
6ddae612 2078 if (GET_CODE (dest) == MEM
6e985040
AM
2079 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2080 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2081 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2082 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2083 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2084 else
6ddae612 2085 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2086 mode, tmps[i], ssize);
fffa9c1d 2087 }
729a2125 2088
3a94c984 2089 emit_queue ();
aac5cc16
RH
2090
2091 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2092 if (orig_dst != dst)
aac5cc16 2093 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2094}
2095
c36fce9a
GRK
2096/* Generate code to copy a BLKmode object of TYPE out of a
2097 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2098 is null, a stack temporary is created. TGTBLK is returned.
2099
c988af2b
RS
2100 The purpose of this routine is to handle functions that return
2101 BLKmode structures in registers. Some machines (the PA for example)
2102 want to return all small structures in registers regardless of the
2103 structure's alignment. */
c36fce9a
GRK
2104
2105rtx
502b8322 2106copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2107{
19caa751
RK
2108 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2109 rtx src = NULL, dst = NULL;
2110 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2111 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2112
2113 if (tgtblk == 0)
2114 {
1da68f56
RK
2115 tgtblk = assign_temp (build_qualified_type (type,
2116 (TYPE_QUALS (type)
2117 | TYPE_QUAL_CONST)),
2118 0, 1, 1);
19caa751
RK
2119 preserve_temp_slots (tgtblk);
2120 }
3a94c984 2121
1ed1b4fb 2122 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2123 into a new pseudo which is a full word. */
0d7839da 2124
19caa751
RK
2125 if (GET_MODE (srcreg) != BLKmode
2126 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2127 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751 2128
c988af2b
RS
2129 /* If the structure doesn't take up a whole number of words, see whether
2130 SRCREG is padded on the left or on the right. If it's on the left,
2131 set PADDING_CORRECTION to the number of bits to skip.
2132
2133 In most ABIs, the structure will be returned at the least significant end of
2134 the register, which translates to right padding on little-endian
2135 targets and left padding on big-endian targets. The opposite
2136 holds if the structure is returned at the most significant
2137 end of the register. */
2138 if (bytes % UNITS_PER_WORD != 0
2139 && (targetm.calls.return_in_msb (type)
2140 ? !BYTES_BIG_ENDIAN
2141 : BYTES_BIG_ENDIAN))
2142 padding_correction
19caa751
RK
2143 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2144
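  /* Worked example (illustrative): with 32-bit words on a big-endian
     target that returns structures at the least significant end, a
     6-byte structure leaves bytes % UNITS_PER_WORD == 2, so
     padding_correction = 32 - 2 * 8 = 16 bits are skipped.  */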
2145 /* Copy the structure BITSIZE bits at a time.
3a94c984 2146
19caa751
RK
2147 We could probably emit more efficient code for machines which do not use
2148 strict alignment, but it doesn't seem worth the effort at the current
2149 time. */
c988af2b 2150 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2151 bitpos < bytes * BITS_PER_UNIT;
2152 bitpos += bitsize, xbitpos += bitsize)
2153 {
3a94c984 2154 /* We need a new source operand each time xbitpos is on a
c988af2b 2155 word boundary and when xbitpos == padding_correction
19caa751
RK
2156 (the first time through). */
2157 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2158 || xbitpos == padding_correction)
b47f8cfc
JH
2159 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2160 GET_MODE (srcreg));
19caa751
RK
2161
2162 /* We need a new destination operand each time bitpos is on
2163 a word boundary. */
2164 if (bitpos % BITS_PER_WORD == 0)
2165 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2166
19caa751
RK
2167 /* Use xbitpos for the source extraction (right justified) and
2168 bitpos for the destination store (left justified). */
2169 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2170 extract_bit_field (src, bitsize,
2171 xbitpos % BITS_PER_WORD, 1,
2172 NULL_RTX, word_mode, word_mode,
04050c69
RK
2173 BITS_PER_WORD),
2174 BITS_PER_WORD);
19caa751
RK
2175 }
2176
2177 return tgtblk;
c36fce9a
GRK
2178}
2179
94b25f81
RK
2180/* Add a USE expression for REG to the (possibly empty) list pointed
2181 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2182
2183void
502b8322 2184use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2185{
0304dfbb
DE
2186 if (GET_CODE (reg) != REG
2187 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2188 abort ();
b3f8cf4a
RK
2189
2190 *call_fusage
38a448ca
RH
2191 = gen_rtx_EXPR_LIST (VOIDmode,
2192 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2193}
2194
94b25f81
RK
2195/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2196 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2197
2198void
502b8322 2199use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2200{
0304dfbb 2201 int i;
bbf6f052 2202
0304dfbb
DE
2203 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2204 abort ();
2205
2206 for (i = 0; i < nregs; i++)
e50126e8 2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2208}
fffa9c1d
JW
2209
2210/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2213
2214void
502b8322 2215use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2216{
2217 int i;
2218
6bd35f86
DE
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2220 {
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2222
6bd35f86
DE
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
e9a25f70 2226 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2227 use_reg (call_fusage, reg);
2228 }
fffa9c1d 2229}
bbf6f052 2230\f
57814e5e 2231
cf5124f6
RS
2232/* Determine whether the LEN bytes generated by CONSTFUN can be
2233 stored to memory using several move instructions. CONSTFUNDATA is
2234 a pointer which will be passed as argument in every CONSTFUN call.
2235 ALIGN is maximum alignment we can assume. Return nonzero if a
2236 call to store_by_pieces should succeed. */
2237
57814e5e 2238int
502b8322
AJ
2239can_store_by_pieces (unsigned HOST_WIDE_INT len,
2240 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2241 void *constfundata, unsigned int align)
57814e5e 2242{
98166639 2243 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2244 HOST_WIDE_INT offset = 0;
2245 enum machine_mode mode, tmode;
2246 enum insn_code icode;
2247 int reverse;
2248 rtx cst;
2249
2c430630
RS
2250 if (len == 0)
2251 return 1;
2252
4977bab6 2253 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2254 return 0;
2255
2256 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2257 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2258 align = MOVE_MAX * BITS_PER_UNIT;
2259
2260 /* We would first store what we can in the largest integer mode, then go to
2261 successively smaller modes. */
2262
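  /* Illustrative example: with MOVE_MAX of 4 bytes and sufficient
     alignment, LEN == 7 is checked as one SImode piece, one HImode piece
     and one QImode piece; each piece's constant must satisfy
     LEGITIMATE_CONSTANT_P for the overall check to succeed.  */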
2263 for (reverse = 0;
2264 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2265 reverse++)
2266 {
2267 l = len;
2268 mode = VOIDmode;
cf5124f6 2269 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2270 while (max_size > 1)
2271 {
2272 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2273 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2274 if (GET_MODE_SIZE (tmode) < max_size)
2275 mode = tmode;
2276
2277 if (mode == VOIDmode)
2278 break;
2279
2280 icode = mov_optab->handlers[(int) mode].insn_code;
2281 if (icode != CODE_FOR_nothing
2282 && align >= GET_MODE_ALIGNMENT (mode))
2283 {
2284 unsigned int size = GET_MODE_SIZE (mode);
2285
2286 while (l >= size)
2287 {
2288 if (reverse)
2289 offset -= size;
2290
2291 cst = (*constfun) (constfundata, offset, mode);
2292 if (!LEGITIMATE_CONSTANT_P (cst))
2293 return 0;
2294
2295 if (!reverse)
2296 offset += size;
2297
2298 l -= size;
2299 }
2300 }
2301
2302 max_size = GET_MODE_SIZE (mode);
2303 }
2304
2305 /* The code above should have handled everything. */
2306 if (l != 0)
2307 abort ();
2308 }
2309
2310 return 1;
2311}
2312
2313/* Generate several move instructions to store LEN bytes generated by
2314 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2315 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2316 ALIGN is maximum alignment we can assume.
2317 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2318 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2319 stpcpy. */
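/* For instance (illustrative), a mempcpy-style expansion would pass
   ENDP == 1 so the address just past the last byte stored is returned,
   while a stpcpy-style expansion would pass ENDP == 2 so the address of
   the final byte stored (the copied nul) is returned.  */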
57814e5e 2320
8fd3cf4e 2321rtx
502b8322
AJ
2322store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2323 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2324 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2325{
2326 struct store_by_pieces data;
2327
2c430630
RS
2328 if (len == 0)
2329 {
2330 if (endp == 2)
2331 abort ();
2332 return to;
2333 }
2334
4977bab6 2335 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2336 abort ();
2337 to = protect_from_queue (to, 1);
2338 data.constfun = constfun;
2339 data.constfundata = constfundata;
2340 data.len = len;
2341 data.to = to;
2342 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2343 if (endp)
2344 {
2345 rtx to1;
2346
2347 if (data.reverse)
2348 abort ();
2349 if (data.autinc_to)
2350 {
2351 if (endp == 2)
2352 {
2353 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2354 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2355 else
2356 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2357 -1));
2358 }
2359 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2360 data.offset);
2361 }
2362 else
2363 {
2364 if (endp == 2)
2365 --data.offset;
2366 to1 = adjust_address (data.to, QImode, data.offset);
2367 }
2368 return to1;
2369 }
2370 else
2371 return data.to;
57814e5e
JJ
2372}
2373
19caa751
RK
2374/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2375 rtx with BLKmode). The caller must pass TO through protect_from_queue
2376 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2377
2378static void
342e2b74 2379clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2380{
57814e5e
JJ
2381 struct store_by_pieces data;
2382
2c430630
RS
2383 if (len == 0)
2384 return;
2385
57814e5e 2386 data.constfun = clear_by_pieces_1;
df4ae160 2387 data.constfundata = NULL;
57814e5e
JJ
2388 data.len = len;
2389 data.to = to;
2390 store_by_pieces_1 (&data, align);
2391}
2392
2393/* Callback routine for clear_by_pieces.
2394 Return const0_rtx unconditionally. */
2395
2396static rtx
502b8322
AJ
2397clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2399 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2400{
2401 return const0_rtx;
2402}
2403
2404/* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2408
2409static void
502b8322
AJ
2410store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2411 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2412{
2413 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2414 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2415 enum machine_mode mode = VOIDmode, tmode;
2416 enum insn_code icode;
9de08200 2417
57814e5e
JJ
2418 data->offset = 0;
2419 data->to_addr = to_addr;
2420 data->autinc_to
9de08200
RK
2421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2423
57814e5e
JJ
2424 data->explicit_inc_to = 0;
2425 data->reverse
9de08200 2426 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2427 if (data->reverse)
2428 data->offset = data->len;
9de08200 2429
57814e5e 2430 /* If storing requires more than two move insns,
9de08200
RK
2431 copy addresses to registers (to make displacements shorter)
2432 and use post-increment if available. */
57814e5e
JJ
2433 if (!data->autinc_to
2434 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2435 {
3a94c984 2436 /* Determine the main mode we'll be using. */
fbe1758d
AM
2437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2439 if (GET_MODE_SIZE (tmode) < max_size)
2440 mode = tmode;
2441
57814e5e 2442 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2443 {
57814e5e
JJ
2444 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2445 data->autinc_to = 1;
2446 data->explicit_inc_to = -1;
9de08200 2447 }
3bdf5ad1 2448
57814e5e
JJ
2449 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2450 && ! data->autinc_to)
9de08200 2451 {
57814e5e
JJ
2452 data->to_addr = copy_addr_to_reg (to_addr);
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = 1;
9de08200 2455 }
3bdf5ad1 2456
57814e5e
JJ
2457 if ( !data->autinc_to && CONSTANT_P (to_addr))
2458 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2459 }
2460
e1565e65 2461 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2462 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2463 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2464
57814e5e 2465 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2466 successively smaller modes. */
2467
2468 while (max_size > 1)
2469 {
9de08200
RK
2470 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2471 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2472 if (GET_MODE_SIZE (tmode) < max_size)
2473 mode = tmode;
2474
2475 if (mode == VOIDmode)
2476 break;
2477
2478 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2479 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2480 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2481
2482 max_size = GET_MODE_SIZE (mode);
2483 }
2484
2485 /* The code above should have handled everything. */
57814e5e 2486 if (data->len != 0)
9de08200
RK
2487 abort ();
2488}
2489
57814e5e 2490/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2491 with move instructions for mode MODE. GENFUN is the gen_... function
2492 to make a move insn for that mode. DATA has all the other info. */
2493
2494static void
502b8322
AJ
2495store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2496 struct store_by_pieces *data)
9de08200 2497{
3bdf5ad1 2498 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2499 rtx to1, cst;
9de08200
RK
2500
2501 while (data->len >= size)
2502 {
3bdf5ad1
RK
2503 if (data->reverse)
2504 data->offset -= size;
9de08200 2505
3bdf5ad1 2506 if (data->autinc_to)
630036c6
JJ
2507 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2508 data->offset);
3a94c984 2509 else
f4ef873c 2510 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2511
940da324 2512 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2513 emit_insn (gen_add2_insn (data->to_addr,
2514 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2515
57814e5e
JJ
2516 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2517 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2518
940da324 2519 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2520 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2521
3bdf5ad1
RK
2522 if (! data->reverse)
2523 data->offset += size;
9de08200
RK
2524
2525 data->len -= size;
2526 }
2527}
2528\f
19caa751 2529/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2530 its length in bytes. */
e9a25f70
JL
2531
2532rtx
502b8322 2533clear_storage (rtx object, rtx size)
bbf6f052 2534{
e9a25f70 2535 rtx retval = 0;
8ac61af7
RK
2536 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2537 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2538
fcf1b822
RK
2539 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2540 just move a zero. Otherwise, do this a piece at a time. */
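  /* Illustrative: clearing a 4-byte SImode object emits a single move of
     CONST0_RTX, while a larger BLKmode object falls through to
     clear_by_pieces, a clrstr pattern, or a memset/bzero libcall, tried
     in that order below.  */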
69ef87e2 2541 if (GET_MODE (object) != BLKmode
fcf1b822 2542 && GET_CODE (size) == CONST_INT
4ca79136 2543 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2544 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2545 else
bbf6f052 2546 {
9de08200
RK
2547 object = protect_from_queue (object, 1);
2548 size = protect_from_queue (size, 0);
2549
6972c506 2550 if (size == const0_rtx)
2c430630
RS
2551 ;
2552 else if (GET_CODE (size) == CONST_INT
78762e3b 2553 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2554 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2555 else if (clear_storage_via_clrstr (object, size, align))
2556 ;
9de08200 2557 else
4ca79136
RH
2558 retval = clear_storage_via_libcall (object, size);
2559 }
2560
2561 return retval;
2562}
2563
2564/* A subroutine of clear_storage. Expand a clrstr pattern;
2565 return true if successful. */
2566
2567static bool
502b8322 2568clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2569{
2570 /* Try the most limited insn first, because there's no point
2571 including more than one in the machine description unless
2572 the more limited one has some advantage. */
2573
2574 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2575 enum machine_mode mode;
2576
2577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2578 mode = GET_MODE_WIDER_MODE (mode))
2579 {
2580 enum insn_code code = clrstr_optab[(int) mode];
2581 insn_operand_predicate_fn pred;
2582
2583 if (code != CODE_FOR_nothing
2584 /* We don't need MODE to be narrower than
2585 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2586 the mode mask, as it is returned by the macro, it will
2587 definitely be less than the actual mode mask. */
2588 && ((GET_CODE (size) == CONST_INT
2589 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2590 <= (GET_MODE_MASK (mode) >> 1)))
2591 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2592 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2593 || (*pred) (object, BLKmode))
2594 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2595 || (*pred) (opalign, VOIDmode)))
9de08200 2596 {
4ca79136
RH
2597 rtx op1;
2598 rtx last = get_last_insn ();
2599 rtx pat;
9de08200 2600
4ca79136
RH
2601 op1 = convert_to_mode (mode, size, 1);
2602 pred = insn_data[(int) code].operand[1].predicate;
2603 if (pred != 0 && ! (*pred) (op1, mode))
2604 op1 = copy_to_mode_reg (mode, op1);
9de08200 2605
4ca79136
RH
2606 pat = GEN_FCN ((int) code) (object, op1, opalign);
2607 if (pat)
9de08200 2608 {
4ca79136
RH
2609 emit_insn (pat);
2610 return true;
2611 }
2612 else
2613 delete_insns_since (last);
2614 }
2615 }
9de08200 2616
4ca79136
RH
2617 return false;
2618}
9de08200 2619
4ca79136
RH
2620/* A subroutine of clear_storage. Expand a call to memset or bzero.
2621 Return the return value of memset, 0 otherwise. */
9de08200 2622
4ca79136 2623static rtx
502b8322 2624clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2625{
2626 tree call_expr, arg_list, fn, object_tree, size_tree;
2627 enum machine_mode size_mode;
2628 rtx retval;
9de08200 2629
4ca79136 2630 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2631
4ca79136
RH
2632 It is unsafe to save the value generated by protect_from_queue
2633 and reuse it later. Consider what happens if emit_queue is
2634 called before the return value from protect_from_queue is used.
52cf7115 2635
4ca79136
RH
2636 Expansion of the CALL_EXPR below will call emit_queue before
2637 we are finished emitting RTL for argument setup. So if we are
2638 not careful we could get the wrong value for an argument.
52cf7115 2639
4ca79136
RH
2640 To avoid this problem we go ahead and emit code to copy OBJECT
2641 and SIZE into new pseudos. We can then place those new pseudos
2642 into an RTL_EXPR and use them later, even after a call to
2643 emit_queue.
52cf7115 2644
4ca79136
RH
2645 Note this is not strictly needed for library calls since they
2646 do not call emit_queue before loading their arguments. However,
2647 we may need to have library calls call emit_queue in the future
2648 since failing to do so could cause problems for targets which
2649 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2650
4ca79136 2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2652
4ca79136
RH
2653 if (TARGET_MEM_FUNCTIONS)
2654 size_mode = TYPE_MODE (sizetype);
2655 else
2656 size_mode = TYPE_MODE (unsigned_type_node);
2657 size = convert_to_mode (size_mode, size, 1);
2658 size = copy_to_mode_reg (size_mode, size);
52cf7115 2659
4ca79136
RH
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context. This could be a user call to memset and
2662 the user may wish to examine the return value from memset. For
2663 targets where libcalls and normal calls have different conventions
2664 for returning pointers, we could end up generating incorrect code.
4bc973ae 2665
4ca79136 2666 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2667
4ca79136
RH
2668 object_tree = make_tree (ptr_type_node, object);
2669 if (TARGET_MEM_FUNCTIONS)
2670 size_tree = make_tree (sizetype, size);
2671 else
2672 size_tree = make_tree (unsigned_type_node, size);
2673
2674 fn = clear_storage_libcall_fn (true);
2675 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2676 if (TARGET_MEM_FUNCTIONS)
2677 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2678 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2679
2680 /* Now we have to build up the CALL_EXPR itself. */
2681 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2682 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2683 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2684
2685 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2686
2687 /* If we are initializing a readonly value, show the above call
2688 clobbered it. Otherwise, a load from it may erroneously be
2689 hoisted from a loop. */
2690 if (RTX_UNCHANGING_P (object))
2691 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2692
2693 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2694}
2695
2696/* A subroutine of clear_storage_via_libcall. Create the tree node
2697 for the function we use for block clears. The first time FOR_CALL
2698 is true, we call assemble_external. */
2699
2700static GTY(()) tree block_clear_fn;
66c60e67 2701
9661b15f 2702void
502b8322 2703init_block_clear_fn (const char *asmspec)
4ca79136 2704{
9661b15f 2705 if (!block_clear_fn)
4ca79136 2706 {
9661b15f
JJ
2707 tree fn, args;
2708
4ca79136
RH
2709 if (TARGET_MEM_FUNCTIONS)
2710 {
2711 fn = get_identifier ("memset");
2712 args = build_function_type_list (ptr_type_node, ptr_type_node,
2713 integer_type_node, sizetype,
2714 NULL_TREE);
2715 }
2716 else
2717 {
2718 fn = get_identifier ("bzero");
2719 args = build_function_type_list (void_type_node, ptr_type_node,
2720 unsigned_type_node, NULL_TREE);
9de08200 2721 }
4ca79136
RH
2722
2723 fn = build_decl (FUNCTION_DECL, fn, args);
2724 DECL_EXTERNAL (fn) = 1;
2725 TREE_PUBLIC (fn) = 1;
2726 DECL_ARTIFICIAL (fn) = 1;
2727 TREE_NOTHROW (fn) = 1;
2728
2729 block_clear_fn = fn;
bbf6f052 2730 }
e9a25f70 2731
9661b15f
JJ
2732 if (asmspec)
2733 {
2734 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2735 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2736 }
2737}
2738
2739static tree
502b8322 2740clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2741{
2742 static bool emitted_extern;
2743
2744 if (!block_clear_fn)
2745 init_block_clear_fn (NULL);
2746
4ca79136
RH
2747 if (for_call && !emitted_extern)
2748 {
2749 emitted_extern = true;
9661b15f
JJ
2750 make_decl_rtl (block_clear_fn, NULL);
2751 assemble_external (block_clear_fn);
4ca79136 2752 }
bbf6f052 2753
9661b15f 2754 return block_clear_fn;
4ca79136
RH
2755}
2756\f
bbf6f052
RK
2757/* Generate code to copy Y into X.
2758 Both Y and X must have the same mode, except that
2759 Y can be a constant with VOIDmode.
2760 This mode cannot be BLKmode; use emit_block_move for that.
2761
2762 Return the last instruction emitted. */
2763
2764rtx
502b8322 2765emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2766{
2767 enum machine_mode mode = GET_MODE (x);
de1b33dd 2768 rtx y_cst = NULL_RTX;
0c19a26f 2769 rtx last_insn, set;
bbf6f052
RK
2770
2771 x = protect_from_queue (x, 1);
2772 y = protect_from_queue (y, 0);
2773
2774 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2775 abort ();
2776
ee5332b8
RH
2777 /* Never force constant_p_rtx to memory. */
2778 if (GET_CODE (y) == CONSTANT_P_RTX)
2779 ;
51286de6 2780 else if (CONSTANT_P (y))
de1b33dd 2781 {
51286de6 2782 if (optimize
075fc17a 2783 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2784 && (last_insn = compress_float_constant (x, y)))
2785 return last_insn;
2786
0c19a26f
RS
2787 y_cst = y;
2788
51286de6
RH
2789 if (!LEGITIMATE_CONSTANT_P (y))
2790 {
51286de6 2791 y = force_const_mem (mode, y);
3a04ff64
RH
2792
2793 /* If the target's cannot_force_const_mem prevented the spill,
2794 assume that the target's move expanders will also take care
2795 of the non-legitimate constant. */
2796 if (!y)
2797 y = y_cst;
51286de6 2798 }
de1b33dd 2799 }
bbf6f052
RK
2800
2801 /* If X or Y are memory references, verify that their addresses are valid
2802 for the machine. */
2803 if (GET_CODE (x) == MEM
2804 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2805 && ! push_operand (x, GET_MODE (x)))
2806 || (flag_force_addr
2807 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2808 x = validize_mem (x);
bbf6f052
RK
2809
2810 if (GET_CODE (y) == MEM
2811 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2812 || (flag_force_addr
2813 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2814 y = validize_mem (y);
bbf6f052
RK
2815
2816 if (mode == BLKmode)
2817 abort ();
2818
de1b33dd
AO
2819 last_insn = emit_move_insn_1 (x, y);
2820
0c19a26f
RS
2821 if (y_cst && GET_CODE (x) == REG
2822 && (set = single_set (last_insn)) != NULL_RTX
2823 && SET_DEST (set) == x
2824 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2825 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2826
2827 return last_insn;
261c4230
RS
2828}
2829
2830/* Low level part of emit_move_insn.
2831 Called just like emit_move_insn, but assumes X and Y
2832 are basically valid. */
2833
2834rtx
502b8322 2835emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2836{
2837 enum machine_mode mode = GET_MODE (x);
2838 enum machine_mode submode;
2839 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2840
dbbbbf3b 2841 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2842 abort ();
76bbe028 2843
bbf6f052
RK
2844 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2845 return
2846 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2847
89742723 2848 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2849 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2850 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2851 && (mov_optab->handlers[(int) submode].insn_code
2852 != CODE_FOR_nothing))
2853 {
2854 /* Don't split destination if it is a stack push. */
2855 int stack = push_operand (x, GET_MODE (x));
7308a047 2856
79ce92d7 2857#ifdef PUSH_ROUNDING
0e9cbd11
KH
2858 /* In case we output to the stack, but the size is smaller than the
2859 machine can push exactly, we need to use move instructions. */
1a06f5fe 2860 if (stack
bb93b973
RK
2861 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2862 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2863 {
2864 rtx temp;
bb93b973 2865 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2866
2867 /* Do not use anti_adjust_stack, since we don't want to update
2868 stack_pointer_delta. */
2869 temp = expand_binop (Pmode,
2870#ifdef STACK_GROWS_DOWNWARD
2871 sub_optab,
2872#else
2873 add_optab,
2874#endif
2875 stack_pointer_rtx,
2876 GEN_INT
bb93b973
RK
2877 (PUSH_ROUNDING
2878 (GET_MODE_SIZE (GET_MODE (x)))),
2879 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2880
1a06f5fe
JH
2881 if (temp != stack_pointer_rtx)
2882 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2883
1a06f5fe
JH
2884#ifdef STACK_GROWS_DOWNWARD
2885 offset1 = 0;
2886 offset2 = GET_MODE_SIZE (submode);
2887#else
2888 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2889 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2890 + GET_MODE_SIZE (submode));
2891#endif
bb93b973 2892
1a06f5fe
JH
2893 emit_move_insn (change_address (x, submode,
2894 gen_rtx_PLUS (Pmode,
2895 stack_pointer_rtx,
2896 GEN_INT (offset1))),
2897 gen_realpart (submode, y));
2898 emit_move_insn (change_address (x, submode,
2899 gen_rtx_PLUS (Pmode,
2900 stack_pointer_rtx,
2901 GEN_INT (offset2))),
2902 gen_imagpart (submode, y));
2903 }
e9c0bd54 2904 else
79ce92d7 2905#endif
7308a047
RS
2906 /* If this is a stack, push the highpart first, so it
2907 will be in the argument order.
2908
2909 In that case, change_address is used only to convert
2910 the mode, not to change the address. */
e9c0bd54 2911 if (stack)
c937357e 2912 {
e33c0d66
RS
2913 /* Note that the real part always precedes the imag part in memory
2914 regardless of machine's endianness. */
c937357e 2915#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2916 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2917 gen_imagpart (submode, y));
2918 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2919 gen_realpart (submode, y));
c937357e 2920#else
a79b3dc7
RS
2921 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 gen_realpart (submode, y));
2923 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2924 gen_imagpart (submode, y));
c937357e
RS
2925#endif
2926 }
2927 else
2928 {
235ae7be
DM
2929 rtx realpart_x, realpart_y;
2930 rtx imagpart_x, imagpart_y;
2931
405f63da
MM
2932 /* If this is a complex value with each part being smaller than a
2933 word, the usual calling sequence will likely pack the pieces into
2934 a single register. Unfortunately, SUBREG of hard registers only
2935 deals in terms of words, so we have a problem converting input
2936 arguments to the CONCAT of two registers that is used elsewhere
2937 for complex values. If this is before reload, we can copy it into
2938 memory and reload. FIXME, we should see about using extract and
2939 insert on integer registers, but complex short and complex char
2940 variables should be rarely used. */
3a94c984 2941 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2942 && (reload_in_progress | reload_completed) == 0)
2943 {
bb93b973
RK
2944 int packed_dest_p
2945 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2946 int packed_src_p
2947 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2948
2949 if (packed_dest_p || packed_src_p)
2950 {
2951 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2952 ? MODE_FLOAT : MODE_INT);
2953
1da68f56
RK
2954 enum machine_mode reg_mode
2955 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2956
2957 if (reg_mode != BLKmode)
2958 {
2959 rtx mem = assign_stack_temp (reg_mode,
2960 GET_MODE_SIZE (mode), 0);
f4ef873c 2961 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2962
1da68f56
RK
2963 cfun->cannot_inline
2964 = N_("function using short complex types cannot be inline");
405f63da
MM
2965
2966 if (packed_dest_p)
2967 {
2968 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2969
405f63da
MM
2970 emit_move_insn_1 (cmem, y);
2971 return emit_move_insn_1 (sreg, mem);
2972 }
2973 else
2974 {
2975 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 2976
405f63da
MM
2977 emit_move_insn_1 (mem, sreg);
2978 return emit_move_insn_1 (x, cmem);
2979 }
2980 }
2981 }
2982 }
2983
235ae7be
DM
2984 realpart_x = gen_realpart (submode, x);
2985 realpart_y = gen_realpart (submode, y);
2986 imagpart_x = gen_imagpart (submode, x);
2987 imagpart_y = gen_imagpart (submode, y);
2988
2989 /* Show the output dies here. This is necessary for SUBREGs
2990 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2991 hard regs shouldn't appear here except as return values.
2992 We never want to emit such a clobber after reload. */
2993 if (x != y
235ae7be
DM
2994 && ! (reload_in_progress || reload_completed)
2995 && (GET_CODE (realpart_x) == SUBREG
2996 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 2997 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2998
a79b3dc7
RS
2999 emit_move_insn (realpart_x, realpart_y);
3000 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3001 }
7308a047 3002
7a1ab50a 3003 return get_last_insn ();
7308a047
RS
3004 }
3005
a3600c71
HPN
3006 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3007 find a mode to do it in. If we have a movcc, use it. Otherwise,
3008 find the MODE_INT mode of the same width. */
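  /* Illustrative: a 4-byte MODE_CC value with no movcc pattern would be
     copied through SImode via gen_lowpart below.  */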
3009 else if (GET_MODE_CLASS (mode) == MODE_CC
3010 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3011 {
3012 enum insn_code insn_code;
3013 enum machine_mode tmode = VOIDmode;
3014 rtx x1 = x, y1 = y;
3015
3016 if (mode != CCmode
3017 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3018 tmode = CCmode;
3019 else
3020 for (tmode = QImode; tmode != VOIDmode;
3021 tmode = GET_MODE_WIDER_MODE (tmode))
3022 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3023 break;
3024
3025 if (tmode == VOIDmode)
3026 abort ();
3027
3028 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3029 may call change_address which is not appropriate if we were
3030 called when a reload was in progress. We don't have to worry
3031 about changing the address since the size in bytes is supposed to
3032 be the same. Copy the MEM to change the mode and move any
3033 substitutions from the old MEM to the new one. */
3034
3035 if (reload_in_progress)
3036 {
3037 x = gen_lowpart_common (tmode, x1);
3038 if (x == 0 && GET_CODE (x1) == MEM)
3039 {
3040 x = adjust_address_nv (x1, tmode, 0);
3041 copy_replacements (x1, x);
3042 }
3043
3044 y = gen_lowpart_common (tmode, y1);
3045 if (y == 0 && GET_CODE (y1) == MEM)
3046 {
3047 y = adjust_address_nv (y1, tmode, 0);
3048 copy_replacements (y1, y);
3049 }
3050 }
3051 else
3052 {
3053 x = gen_lowpart (tmode, x);
3054 y = gen_lowpart (tmode, y);
3055 }
502b8322 3056
a3600c71
HPN
3057 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3058 return emit_insn (GEN_FCN (insn_code) (x, y));
3059 }
3060
5581fc91
RS
3061 /* Try using a move pattern for the corresponding integer mode. This is
3062 only safe when simplify_subreg can convert MODE constants into integer
3063 constants. At present, it can only do this reliably if the value
3064 fits within a HOST_WIDE_INT. */
3065 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3066 && (submode = int_mode_for_mode (mode)) != BLKmode
3067 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3068 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3069 (simplify_gen_subreg (submode, x, mode, 0),
3070 simplify_gen_subreg (submode, y, mode, 0)));
3071
cffa2189
R
3072 /* This will handle any multi-word or full-word mode that lacks a move_insn
3073 pattern. However, you will get better code if you define such patterns,
bbf6f052 3074 even if they must turn into multiple assembler instructions. */
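  /* Illustrative: a DImode move on a 32-bit target that lacks a movdi
     pattern is split below into two word-sized moves obtained with
     operand_subword.  */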
cffa2189 3075 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3076 {
3077 rtx last_insn = 0;
3ef1eef4 3078 rtx seq, inner;
235ae7be 3079 int need_clobber;
bb93b973 3080 int i;
3a94c984 3081
a98c9f1a
RK
3082#ifdef PUSH_ROUNDING
3083
3084 /* If X is a push on the stack, do the push now and replace
3085 X with a reference to the stack pointer. */
3086 if (push_operand (x, GET_MODE (x)))
3087 {
918a6124
GK
3088 rtx temp;
3089 enum rtx_code code;
0fb7aeda 3090
918a6124
GK
3091 /* Do not use anti_adjust_stack, since we don't want to update
3092 stack_pointer_delta. */
3093 temp = expand_binop (Pmode,
3094#ifdef STACK_GROWS_DOWNWARD
3095 sub_optab,
3096#else
3097 add_optab,
3098#endif
3099 stack_pointer_rtx,
3100 GEN_INT
bb93b973
RK
3101 (PUSH_ROUNDING
3102 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3103 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3104
0fb7aeda
KH
3105 if (temp != stack_pointer_rtx)
3106 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3107
3108 code = GET_CODE (XEXP (x, 0));
bb93b973 3109
918a6124
GK
3110 /* Just hope that small offsets off SP are OK. */
3111 if (code == POST_INC)
0fb7aeda 3112 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3113 GEN_INT (-((HOST_WIDE_INT)
3114 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3115 else if (code == POST_DEC)
0fb7aeda 3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3117 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3118 else
3119 temp = stack_pointer_rtx;
3120
3121 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3122 }
3123#endif
3a94c984 3124
3ef1eef4
RK
3125 /* If we are in reload, see if either operand is a MEM whose address
3126 is scheduled for replacement. */
3127 if (reload_in_progress && GET_CODE (x) == MEM
3128 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3129 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3130 if (reload_in_progress && GET_CODE (y) == MEM
3131 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3132 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3133
235ae7be 3134 start_sequence ();
15a7a8ec 3135
235ae7be 3136 need_clobber = 0;
bbf6f052 3137 for (i = 0;
3a94c984 3138 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3139 i++)
3140 {
3141 rtx xpart = operand_subword (x, i, 1, mode);
3142 rtx ypart = operand_subword (y, i, 1, mode);
3143
3144 /* If we can't get a part of Y, put Y into memory if it is a
3145 constant. Otherwise, force it into a register. If we still
3146 can't get a part of Y, abort. */
3147 if (ypart == 0 && CONSTANT_P (y))
3148 {
3149 y = force_const_mem (mode, y);
3150 ypart = operand_subword (y, i, 1, mode);
3151 }
3152 else if (ypart == 0)
3153 ypart = operand_subword_force (y, i, mode);
3154
3155 if (xpart == 0 || ypart == 0)
3156 abort ();
3157
235ae7be
DM
3158 need_clobber |= (GET_CODE (xpart) == SUBREG);
3159
bbf6f052
RK
3160 last_insn = emit_move_insn (xpart, ypart);
3161 }
6551fa4d 3162
2f937369 3163 seq = get_insns ();
235ae7be
DM
3164 end_sequence ();
3165
3166 /* Show the output dies here. This is necessary for SUBREGs
3167 of pseudos since we cannot track their lifetimes correctly;
3168 hard regs shouldn't appear here except as return values.
3169 We never want to emit such a clobber after reload. */
3170 if (x != y
3171 && ! (reload_in_progress || reload_completed)
3172 && need_clobber != 0)
bb93b973 3173 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3174
3175 emit_insn (seq);
3176
bbf6f052
RK
3177 return last_insn;
3178 }
3179 else
3180 abort ();
3181}
51286de6
RH
3182
3183/* If Y is representable exactly in a narrower mode, and the target can
3184 perform the extension directly from constant or memory, then emit the
3185 move as an extension. */
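/* Illustrative sketch: moving the DFmode constant 1.5 can be emitted as a
   float_extend from the SFmode constant 1.5f, since the narrowing is
   exact; a constant such as 0.1, which does not survive the round trip
   through SFmode, is rejected by the exact_real_truncate test below.  */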
3186
3187static rtx
502b8322 3188compress_float_constant (rtx x, rtx y)
51286de6
RH
3189{
3190 enum machine_mode dstmode = GET_MODE (x);
3191 enum machine_mode orig_srcmode = GET_MODE (y);
3192 enum machine_mode srcmode;
3193 REAL_VALUE_TYPE r;
3194
3195 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3196
3197 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3198 srcmode != orig_srcmode;
3199 srcmode = GET_MODE_WIDER_MODE (srcmode))
3200 {
3201 enum insn_code ic;
3202 rtx trunc_y, last_insn;
3203
3204 /* Skip if the target can't extend this way. */
3205 ic = can_extend_p (dstmode, srcmode, 0);
3206 if (ic == CODE_FOR_nothing)
3207 continue;
3208
3209 /* Skip if the narrowed value isn't exact. */
3210 if (! exact_real_truncate (srcmode, &r))
3211 continue;
3212
3213 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3214
3215 if (LEGITIMATE_CONSTANT_P (trunc_y))
3216 {
3217 /* Skip if the target needs extra instructions to perform
3218 the extension. */
3219 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3220 continue;
3221 }
3222 else if (float_extend_from_mem[dstmode][srcmode])
3223 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3224 else
3225 continue;
3226
3227 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3228 last_insn = get_last_insn ();
3229
3230 if (GET_CODE (x) == REG)
0c19a26f 3231 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3232
3233 return last_insn;
3234 }
3235
3236 return NULL_RTX;
3237}
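/* Illustrative example (editor's sketch, not part of the original source):
   for a move such as

       (set (reg:DF d) (const_double:DF 1.0))

   the loop above finds that 1.0 survives truncation to SFmode exactly
   (exact_real_truncate succeeds).  If the target's SF-to-DF extension
   pattern accepts the constant directly, or float_extend_from_mem allows
   extending from memory, the move is emitted instead as roughly

       (set (reg:DF d) (float_extend:DF (mem:SF <constant-pool entry>)))

   so only an SFmode constant-pool entry is needed.  A value such as 0.1,
   which is not exactly representable in SFmode, is skipped and NULL_RTX
   is returned.  */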
bbf6f052
RK
3238\f
3239/* Pushing data onto the stack. */
3240
3241/* Push a block of length SIZE (perhaps variable)
3242 and return an rtx to address the beginning of the block.
3243 Note that it is not possible for the value returned to be a QUEUED.
3244 The value may be virtual_outgoing_args_rtx.
3245
3246 EXTRA is the number of bytes of padding to push in addition to SIZE.
3247 BELOW nonzero means this padding comes at low addresses;
3248 otherwise, the padding comes at high addresses. */
3249
3250rtx
502b8322 3251push_block (rtx size, int extra, int below)
bbf6f052 3252{
b3694847 3253 rtx temp;
88f63c77
RK
3254
3255 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3256 if (CONSTANT_P (size))
3257 anti_adjust_stack (plus_constant (size, extra));
3258 else if (GET_CODE (size) == REG && extra == 0)
3259 anti_adjust_stack (size);
3260 else
3261 {
ce48579b 3262 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3263 if (extra != 0)
906c4e36 3264 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3265 temp, 0, OPTAB_LIB_WIDEN);
3266 anti_adjust_stack (temp);
3267 }
3268
f73ad30e 3269#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3270 if (0)
f73ad30e
JH
3271#else
3272 if (1)
bbf6f052 3273#endif
f73ad30e 3274 {
f73ad30e
JH
3275 temp = virtual_outgoing_args_rtx;
3276 if (extra != 0 && below)
3277 temp = plus_constant (temp, extra);
3278 }
3279 else
3280 {
3281 if (GET_CODE (size) == CONST_INT)
3282 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3283 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3284 else if (extra != 0 && !below)
3285 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3286 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3287 else
3288 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3289 negate_rtx (Pmode, size));
3290 }
bbf6f052
RK
3291
3292 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3293}
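/* Illustrative example (editor's sketch, not part of the original source):
   on a STACK_GROWS_DOWNWARD target,

       rtx addr = push_block (GEN_INT (32), 0, 0);

   emits anti_adjust_stack (GEN_INT (32)) to allocate 32 bytes and returns
   an address based on virtual_outgoing_args_rtx, i.e. the low end of the
   newly allocated block, ready to serve as the destination of a block
   move.  */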
3294
21d93687
RK
3295#ifdef PUSH_ROUNDING
3296
566aa174 3297/* Emit single push insn. */
21d93687 3298
566aa174 3299static void
502b8322 3300emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3301{
566aa174 3302 rtx dest_addr;
918a6124 3303 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3304 rtx dest;
371b8fc0
JH
3305 enum insn_code icode;
3306 insn_operand_predicate_fn pred;
566aa174 3307
371b8fc0
JH
3308 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3309	  /* If there is a push pattern, use it.  Otherwise try the old way of
 3310	     throwing a MEM that represents the push operation at the move expander.  */
3311 icode = push_optab->handlers[(int) mode].insn_code;
3312 if (icode != CODE_FOR_nothing)
3313 {
3314 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3315 && !((*pred) (x, mode))))
3316 x = force_reg (mode, x);
3317 emit_insn (GEN_FCN (icode) (x));
3318 return;
3319 }
566aa174
JH
3320 if (GET_MODE_SIZE (mode) == rounded_size)
3321 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3322 /* If we are to pad downward, adjust the stack pointer first and
3323 then store X into the stack location using an offset. This is
3324 because emit_move_insn does not know how to pad; it does not have
3325 access to type. */
3326 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3327 {
3328 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3329 HOST_WIDE_INT offset;
3330
3331 emit_move_insn (stack_pointer_rtx,
3332 expand_binop (Pmode,
3333#ifdef STACK_GROWS_DOWNWARD
3334 sub_optab,
3335#else
3336 add_optab,
3337#endif
3338 stack_pointer_rtx,
3339 GEN_INT (rounded_size),
3340 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3341
3342 offset = (HOST_WIDE_INT) padding_size;
3343#ifdef STACK_GROWS_DOWNWARD
3344 if (STACK_PUSH_CODE == POST_DEC)
3345 /* We have already decremented the stack pointer, so get the
3346 previous value. */
3347 offset += (HOST_WIDE_INT) rounded_size;
3348#else
3349 if (STACK_PUSH_CODE == POST_INC)
3350 /* We have already incremented the stack pointer, so get the
3351 previous value. */
3352 offset -= (HOST_WIDE_INT) rounded_size;
3353#endif
3354 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3355 }
566aa174
JH
3356 else
3357 {
3358#ifdef STACK_GROWS_DOWNWARD
329d586f 3359 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3360 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3361 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3362#else
329d586f 3363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3364 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3365 GEN_INT (rounded_size));
3366#endif
3367 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3368 }
3369
3370 dest = gen_rtx_MEM (mode, dest_addr);
3371
566aa174
JH
3372 if (type != 0)
3373 {
3374 set_mem_attributes (dest, type, 1);
c3d32120
RK
3375
3376 if (flag_optimize_sibling_calls)
3377 /* Function incoming arguments may overlap with sibling call
3378 outgoing arguments and we cannot allow reordering of reads
3379 from function arguments with stores to outgoing arguments
3380 of sibling calls. */
3381 set_mem_alias_set (dest, 0);
566aa174
JH
3382 }
3383 emit_move_insn (dest, x);
566aa174 3384}
21d93687 3385#endif
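/* Illustrative example (editor's sketch, not part of the original source):
   with no push_optab pattern, STACK_PUSH_CODE == PRE_DEC and
   PUSH_ROUNDING (4) == 4, pushing an SImode register amounts to

       (set (mem:SI (pre_dec (reg sp))) (reg:SI x))

   because the rounded size equals GET_MODE_SIZE (SImode), so the simple
   STACK_PUSH_CODE address is used.  When PUSH_ROUNDING rounds the size up
   (say HImode rounded to 4 bytes) and the argument must be padded
   downward, the code above instead adjusts the stack pointer explicitly
   and stores at an offset from it.  */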
566aa174 3386
bbf6f052
RK
3387/* Generate code to push X onto the stack, assuming it has mode MODE and
3388 type TYPE.
3389 MODE is redundant except when X is a CONST_INT (since they don't
3390 carry mode info).
3391 SIZE is an rtx for the size of data to be copied (in bytes),
3392 needed only if X is BLKmode.
3393
f1eaaf73 3394 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3395
cd048831
RK
3396 If PARTIAL and REG are both nonzero, then copy that many of the first
3397 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3398 The amount of space pushed is decreased by PARTIAL words,
3399 rounded *down* to a multiple of PARM_BOUNDARY.
3400 REG must be a hard register in this case.
cd048831
RK
 3401	   If REG is zero but PARTIAL is not, take all other actions for an
3402 argument partially in registers, but do not actually load any
3403 registers.
bbf6f052
RK
3404
3405 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3406 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3407
3408 On a machine that lacks real push insns, ARGS_ADDR is the address of
3409 the bottom of the argument block for this call. We use indexing off there
 3410	   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3411 argument block has not been preallocated.
3412
e5e809f4
JL
3413 ARGS_SO_FAR is the size of args previously pushed for this call.
3414
3415 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3416 for arguments passed in registers. If nonzero, it will be the number
3417 of bytes required. */
bbf6f052
RK
3418
3419void
3420emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3421 unsigned int align, int partial, rtx reg, int extra,
3422 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3423 rtx alignment_pad)
3424{
3425 rtx xinner;
3426 enum direction stack_direction
3427#ifdef STACK_GROWS_DOWNWARD
3428 = downward;
3429#else
3430 = upward;
3431#endif
3432
3433 /* Decide where to pad the argument: `downward' for below,
3434 `upward' for above, or `none' for don't pad it.
3435 Default is below for small data on big-endian machines; else above. */
3436 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3437
0fb7aeda 3438 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3439 FIXME: why? */
3440 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3441 if (where_pad != none)
3442 where_pad = (where_pad == downward ? upward : downward);
3443
3444 xinner = x = protect_from_queue (x, 0);
3445
3446 if (mode == BLKmode)
3447 {
3448 /* Copy a block into the stack, entirely or partially. */
3449
b3694847 3450 rtx temp;
bbf6f052 3451 int used = partial * UNITS_PER_WORD;
531547e9 3452 int offset;
bbf6f052 3453 int skip;
3a94c984 3454
531547e9
FJ
3455 if (reg && GET_CODE (reg) == PARALLEL)
3456 {
3457 /* Use the size of the elt to compute offset. */
3458 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3459 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3460 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3461 }
3462 else
3463 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3464
bbf6f052
RK
3465 if (size == 0)
3466 abort ();
3467
3468 used -= offset;
3469
3470 /* USED is now the # of bytes we need not copy to the stack
3471 because registers will take care of them. */
3472
3473 if (partial != 0)
f4ef873c 3474 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3475
3476 /* If the partial register-part of the arg counts in its stack size,
3477 skip the part of stack space corresponding to the registers.
3478 Otherwise, start copying to the beginning of the stack space,
3479 by setting SKIP to 0. */
e5e809f4 3480 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3481
3482#ifdef PUSH_ROUNDING
3483 /* Do it with several push insns if that doesn't take lots of insns
3484 and if there is no difficulty with push insns that skip bytes
3485 on the stack for alignment purposes. */
3486 if (args_addr == 0
f73ad30e 3487 && PUSH_ARGS
bbf6f052
RK
3488 && GET_CODE (size) == CONST_INT
3489 && skip == 0
f26aca6d 3490 && MEM_ALIGN (xinner) >= align
15914757 3491 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3492 /* Here we avoid the case of a structure whose weak alignment
3493 forces many pushes of a small amount of data,
3494 and such small pushes do rounding that causes trouble. */
e1565e65 3495 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3496 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3497 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3498 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3499 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3500 {
3501 /* Push padding now if padding above and stack grows down,
3502 or if padding below and stack grows up.
3503 But if space already allocated, this has already been done. */
3504 if (extra && args_addr == 0
3505 && where_pad != none && where_pad != stack_direction)
906c4e36 3506 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3507
8fd3cf4e 3508 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3509 }
3510 else
3a94c984 3511#endif /* PUSH_ROUNDING */
bbf6f052 3512 {
7ab923cc
JJ
3513 rtx target;
3514
bbf6f052
RK
3515 /* Otherwise make space on the stack and copy the data
3516 to the address of that space. */
3517
3518 /* Deduct words put into registers from the size we must copy. */
3519 if (partial != 0)
3520 {
3521 if (GET_CODE (size) == CONST_INT)
906c4e36 3522 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3523 else
3524 size = expand_binop (GET_MODE (size), sub_optab, size,
3525 GEN_INT (used), NULL_RTX, 0,
3526 OPTAB_LIB_WIDEN);
bbf6f052
RK
3527 }
3528
3529 /* Get the address of the stack space.
3530 In this case, we do not deal with EXTRA separately.
3531 A single stack adjust will do. */
3532 if (! args_addr)
3533 {
3534 temp = push_block (size, extra, where_pad == downward);
3535 extra = 0;
3536 }
3537 else if (GET_CODE (args_so_far) == CONST_INT)
3538 temp = memory_address (BLKmode,
3539 plus_constant (args_addr,
3540 skip + INTVAL (args_so_far)));
3541 else
3542 temp = memory_address (BLKmode,
3543 plus_constant (gen_rtx_PLUS (Pmode,
3544 args_addr,
3545 args_so_far),
bbf6f052 3546 skip));
4ca79136
RH
3547
3548 if (!ACCUMULATE_OUTGOING_ARGS)
3549 {
3550 /* If the source is referenced relative to the stack pointer,
3551 copy it to another register to stabilize it. We do not need
3552 to do this if we know that we won't be changing sp. */
3553
3554 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3555 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3556 temp = copy_to_reg (temp);
3557 }
3558
3a94c984 3559 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3560
3a94c984
KH
3561 if (type != 0)
3562 {
3563 set_mem_attributes (target, type, 1);
3564 /* Function incoming arguments may overlap with sibling call
3565 outgoing arguments and we cannot allow reordering of reads
3566 from function arguments with stores to outgoing arguments
3567 of sibling calls. */
ba4828e0 3568 set_mem_alias_set (target, 0);
3a94c984 3569 }
4ca79136 3570
44bb111a
RH
3571 /* ALIGN may well be better aligned than TYPE, e.g. due to
3572 PARM_BOUNDARY. Assume the caller isn't lying. */
3573 set_mem_align (target, align);
4ca79136 3574
44bb111a 3575 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3576 }
3577 }
3578 else if (partial > 0)
3579 {
3580 /* Scalar partly in registers. */
3581
3582 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3583 int i;
3584 int not_stack;
3585 /* # words of start of argument
3586 that we must make space for but need not store. */
3587 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3588 int args_offset = INTVAL (args_so_far);
3589 int skip;
3590
3591 /* Push padding now if padding above and stack grows down,
3592 or if padding below and stack grows up.
3593 But if space already allocated, this has already been done. */
3594 if (extra && args_addr == 0
3595 && where_pad != none && where_pad != stack_direction)
906c4e36 3596 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3597
3598 /* If we make space by pushing it, we might as well push
3599 the real data. Otherwise, we can leave OFFSET nonzero
3600 and leave the space uninitialized. */
3601 if (args_addr == 0)
3602 offset = 0;
3603
3604 /* Now NOT_STACK gets the number of words that we don't need to
3605 allocate on the stack. */
3606 not_stack = partial - offset;
3607
3608 /* If the partial register-part of the arg counts in its stack size,
3609 skip the part of stack space corresponding to the registers.
3610 Otherwise, start copying to the beginning of the stack space,
3611 by setting SKIP to 0. */
e5e809f4 3612 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3613
3614 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3615 x = validize_mem (force_const_mem (mode, x));
3616
3617 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3618 SUBREGs of such registers are not allowed. */
3619 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3620 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3621 x = copy_to_reg (x);
3622
3623 /* Loop over all the words allocated on the stack for this arg. */
3624 /* We can do it by words, because any scalar bigger than a word
 3625	     has a size that is a multiple of a word.  */
3626#ifndef PUSH_ARGS_REVERSED
3627 for (i = not_stack; i < size; i++)
3628#else
3629 for (i = size - 1; i >= not_stack; i--)
3630#endif
3631 if (i >= not_stack + offset)
3632 emit_push_insn (operand_subword_force (x, i, mode),
3633 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3634 0, args_addr,
3635 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3636 * UNITS_PER_WORD)),
4fc026cd 3637 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3638 }
3639 else
3640 {
3641 rtx addr;
3bdf5ad1 3642 rtx dest;
bbf6f052
RK
3643
3644 /* Push padding now if padding above and stack grows down,
3645 or if padding below and stack grows up.
3646 But if space already allocated, this has already been done. */
3647 if (extra && args_addr == 0
3648 && where_pad != none && where_pad != stack_direction)
906c4e36 3649 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3650
3651#ifdef PUSH_ROUNDING
f73ad30e 3652 if (args_addr == 0 && PUSH_ARGS)
566aa174 3653 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3654 else
3655#endif
921b3427
RK
3656 {
3657 if (GET_CODE (args_so_far) == CONST_INT)
3658 addr
3659 = memory_address (mode,
3a94c984 3660 plus_constant (args_addr,
921b3427 3661 INTVAL (args_so_far)));
3a94c984 3662 else
38a448ca
RH
3663 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3664 args_so_far));
566aa174
JH
3665 dest = gen_rtx_MEM (mode, addr);
3666 if (type != 0)
3667 {
3668 set_mem_attributes (dest, type, 1);
3669 /* Function incoming arguments may overlap with sibling call
3670 outgoing arguments and we cannot allow reordering of reads
3671 from function arguments with stores to outgoing arguments
3672 of sibling calls. */
ba4828e0 3673 set_mem_alias_set (dest, 0);
566aa174 3674 }
bbf6f052 3675
566aa174 3676 emit_move_insn (dest, x);
566aa174 3677 }
bbf6f052
RK
3678 }
3679
bbf6f052
RK
3680 /* If part should go in registers, copy that part
3681 into the appropriate registers. Do this now, at the end,
3682 since mem-to-mem copies above may do function calls. */
cd048831 3683 if (partial > 0 && reg != 0)
fffa9c1d
JW
3684 {
3685 /* Handle calls that pass values in multiple non-contiguous locations.
3686 The Irix 6 ABI has examples of this. */
3687 if (GET_CODE (reg) == PARALLEL)
6e985040 3688 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3689 else
3690 move_block_to_reg (REGNO (reg), x, partial, mode);
3691 }
bbf6f052
RK
3692
3693 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3694 anti_adjust_stack (GEN_INT (extra));
3a94c984 3695
3ea2292a 3696 if (alignment_pad && args_addr == 0)
4fc026cd 3697 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3698}
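/* Illustrative example (editor's sketch, not part of the original source):
   for a DImode scalar argument on a 32-bit target with PARTIAL == 1, the
   "scalar partly in registers" case above pushes only the word that lives
   on the stack (the loop over I skips the first NOT_STACK words), and the
   final PARTIAL > 0 block copies the remaining word into REG with
   move_block_to_reg.  If instead the argument is a BLKmode structure and
   ARGS_ADDR is zero on a PUSH_ARGS target, the block is either pushed
   piecewise with move_by_pieces or copied with emit_block_move into space
   obtained from push_block.  */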
3699\f
296b4ed9
RK
3700/* Return X if X can be used as a subtarget in a sequence of arithmetic
3701 operations. */
3702
3703static rtx
502b8322 3704get_subtarget (rtx x)
296b4ed9
RK
3705{
3706 return ((x == 0
3707 /* Only registers can be subtargets. */
3708 || GET_CODE (x) != REG
3709 /* If the register is readonly, it can't be set more than once. */
3710 || RTX_UNCHANGING_P (x)
3711 /* Don't use hard regs to avoid extending their life. */
3712 || REGNO (x) < FIRST_PSEUDO_REGISTER
3713 /* Avoid subtargets inside loops,
3714 since they hide some invariant expressions. */
3715 || preserve_subexpressions_p ())
3716 ? 0 : x);
3717}
3718
bbf6f052
RK
3719/* Expand an assignment that stores the value of FROM into TO.
3720 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3721 (This may contain a QUEUED rtx;
3722 if the value is constant, this rtx is a constant.)
b90f141a 3723 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3724
3725rtx
b90f141a 3726expand_assignment (tree to, tree from, int want_value)
bbf6f052 3727{
b3694847 3728 rtx to_rtx = 0;
bbf6f052
RK
3729 rtx result;
3730
3731 /* Don't crash if the lhs of the assignment was erroneous. */
3732
3733 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3734 {
3735 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3736 return want_value ? result : NULL_RTX;
3737 }
bbf6f052
RK
3738
3739 /* Assignment of a structure component needs special treatment
3740 if the structure component's rtx is not simply a MEM.
6be58303
JW
3741 Assignment of an array element at a constant index, and assignment of
3742 an array element in an unaligned packed structure field, has the same
3743 problem. */
bbf6f052 3744
08293add 3745 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3746 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3747 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3748 {
3749 enum machine_mode mode1;
770ae6cc 3750 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3751 rtx orig_to_rtx;
7bb0943f 3752 tree offset;
bbf6f052
RK
3753 int unsignedp;
3754 int volatilep = 0;
0088fcb1
RK
3755 tree tem;
3756
3757 push_temp_slots ();
839c4796 3758 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3759 &unsignedp, &volatilep);
bbf6f052
RK
3760
3761 /* If we are going to use store_bit_field and extract_bit_field,
3762 make sure to_rtx will be safe for multiple use. */
3763
3764 if (mode1 == VOIDmode && want_value)
3765 tem = stabilize_reference (tem);
3766
1ed1b4fb
RK
3767 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3768
7bb0943f
RS
3769 if (offset != 0)
3770 {
e3c8ea67 3771 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3772
3773 if (GET_CODE (to_rtx) != MEM)
3774 abort ();
bd070e1a 3775
bd070e1a 3776#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3777 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3778 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3779#else
3780 if (GET_MODE (offset_rtx) != ptr_mode)
3781 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3782#endif
bd070e1a 3783
9a7b9f4f
JL
3784 /* A constant address in TO_RTX can have VOIDmode, we must not try
3785 to call force_reg for that case. Avoid that case. */
89752202
HB
3786 if (GET_CODE (to_rtx) == MEM
3787 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3788 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3789 && bitsize > 0
3a94c984 3790 && (bitpos % bitsize) == 0
89752202 3791 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3792 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3793 {
e3c8ea67 3794 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3795 bitpos = 0;
3796 }
3797
0d4903b8 3798 to_rtx = offset_address (to_rtx, offset_rtx,
3799 highest_pow2_factor_for_type (TREE_TYPE (to),
3800 offset));
7bb0943f 3801 }
c5c76735 3802
998d7deb
RH
3803 if (GET_CODE (to_rtx) == MEM)
3804 {
998d7deb
RH
3805 /* If the field is at offset zero, we could have been given the
3806 DECL_RTX of the parent struct. Don't munge it. */
3807 to_rtx = shallow_copy_rtx (to_rtx);
3808
6f1087be 3809 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3810 }
effbcc6a 3811
a06ef755
RK
3812 /* Deal with volatile and readonly fields. The former is only done
3813 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3814 if (volatilep && GET_CODE (to_rtx) == MEM)
3815 {
3816 if (to_rtx == orig_to_rtx)
3817 to_rtx = copy_rtx (to_rtx);
3818 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3819 }
3820
956d6950 3821 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3822 && TREE_READONLY (TREE_OPERAND (to, 1))
3823 /* We can't assert that a MEM won't be set more than once
3824 if the component is not addressable because another
3825 non-addressable component may be referenced by the same MEM. */
3826 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3827 {
a06ef755 3828 if (to_rtx == orig_to_rtx)
956d6950 3829 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3830 RTX_UNCHANGING_P (to_rtx) = 1;
3831 }
3832
a84b4898 3833 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3834 {
3835 if (to_rtx == orig_to_rtx)
3836 to_rtx = copy_rtx (to_rtx);
3837 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3838 }
3839
a06ef755
RK
3840 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3841 (want_value
3842 /* Spurious cast for HPUX compiler. */
3843 ? ((enum machine_mode)
3844 TYPE_MODE (TREE_TYPE (to)))
3845 : VOIDmode),
3846 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3847
a06ef755
RK
3848 preserve_temp_slots (result);
3849 free_temp_slots ();
3850 pop_temp_slots ();
a69beca1 3851
a06ef755
RK
3852 /* If the value is meaningful, convert RESULT to the proper mode.
3853 Otherwise, return nothing. */
3854 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3855 TYPE_MODE (TREE_TYPE (from)),
3856 result,
3857 TREE_UNSIGNED (TREE_TYPE (to)))
3858 : NULL_RTX);
bbf6f052
RK
3859 }
3860
cd1db108
RS
3861 /* If the rhs is a function call and its value is not an aggregate,
3862 call the function before we start to compute the lhs.
3863 This is needed for correct code for cases such as
3864 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3865 requires loading up part of an address in a separate insn.
3866
1858863b
JW
3867 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3868 since it might be a promoted variable where the zero- or sign- extension
3869 needs to be done. Handling this in the normal way is safe because no
3870 computation is done before the call. */
61f71b34 3871 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3872 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3873 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3874 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3875 {
0088fcb1
RK
3876 rtx value;
3877
3878 push_temp_slots ();
3879 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3880 if (to_rtx == 0)
37a08a29 3881 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3882
fffa9c1d
JW
3883 /* Handle calls that return values in multiple non-contiguous locations.
3884 The Irix 6 ABI has examples of this. */
3885 if (GET_CODE (to_rtx) == PARALLEL)
3886 emit_group_load (to_rtx, value, TREE_TYPE (from),
3887 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3888 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3889 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3890 else
6419e5b0 3891 {
5ae6cd0d 3892 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3893 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3894 emit_move_insn (to_rtx, value);
3895 }
cd1db108
RS
3896 preserve_temp_slots (to_rtx);
3897 free_temp_slots ();
0088fcb1 3898 pop_temp_slots ();
709f5be1 3899 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3900 }
3901
bbf6f052
RK
3902 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3903 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3904
3905 if (to_rtx == 0)
37a08a29 3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3907
86d38d25 3908 /* Don't move directly into a return register. */
14a774a9
RK
3909 if (TREE_CODE (to) == RESULT_DECL
3910 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3911 {
0088fcb1
RK
3912 rtx temp;
3913
3914 push_temp_slots ();
3915 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3916
3917 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3918 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3919 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3920 else
3921 emit_move_insn (to_rtx, temp);
3922
86d38d25
RS
3923 preserve_temp_slots (to_rtx);
3924 free_temp_slots ();
0088fcb1 3925 pop_temp_slots ();
709f5be1 3926 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3927 }
3928
bbf6f052
RK
3929 /* In case we are returning the contents of an object which overlaps
3930 the place the value is being stored, use a safe function when copying
3931 a value through a pointer into a structure value return block. */
3932 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3933 && current_function_returns_struct
3934 && !current_function_returns_pcc_struct)
3935 {
0088fcb1
RK
3936 rtx from_rtx, size;
3937
3938 push_temp_slots ();
33a20d10 3939 size = expr_size (from);
37a08a29 3940 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3941
4ca79136
RH
3942 if (TARGET_MEM_FUNCTIONS)
3943 emit_library_call (memmove_libfunc, LCT_NORMAL,
3944 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3945 XEXP (from_rtx, 0), Pmode,
3946 convert_to_mode (TYPE_MODE (sizetype),
3947 size, TREE_UNSIGNED (sizetype)),
3948 TYPE_MODE (sizetype));
3949 else
3950 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3951 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3952 XEXP (to_rtx, 0), Pmode,
3953 convert_to_mode (TYPE_MODE (integer_type_node),
3954 size,
3955 TREE_UNSIGNED (integer_type_node)),
3956 TYPE_MODE (integer_type_node));
bbf6f052
RK
3957
3958 preserve_temp_slots (to_rtx);
3959 free_temp_slots ();
0088fcb1 3960 pop_temp_slots ();
709f5be1 3961 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3962 }
3963
3964 /* Compute FROM and store the value in the rtx we got. */
3965
0088fcb1 3966 push_temp_slots ();
bbf6f052
RK
3967 result = store_expr (from, to_rtx, want_value);
3968 preserve_temp_slots (result);
3969 free_temp_slots ();
0088fcb1 3970 pop_temp_slots ();
709f5be1 3971 return want_value ? result : NULL_RTX;
bbf6f052
RK
3972}
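/* Illustrative example (editor's sketch, not part of the original source):
   for a C assignment to a bit-field member such as

       p->flags = v;

   TO is a COMPONENT_REF, so the first branch above calls
   get_inner_reference to obtain BITSIZE/BITPOS and then lets store_field
   emit the bit-field store.  For the case described in the comment above,

       val = setjmp (buf);

   where VAL lives in memory, the CALL_EXPR branch expands the call before
   touching the lhs, which matters on machines where referencing VAL needs
   part of an address loaded in a separate insn.  */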
3973
3974/* Generate code for computing expression EXP,
3975 and storing the value into TARGET.
bbf6f052
RK
3976 TARGET may contain a QUEUED rtx.
3977
8403445a 3978 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
3979 not in TARGET, so that we can be sure to use the proper
3980 value in a containing expression even if TARGET has something
3981 else stored in it. If possible, we copy the value through a pseudo
3982 and return that pseudo. Or, if the value is constant, we try to
3983 return the constant. In some cases, we return a pseudo
3984 copied *from* TARGET.
3985
3986 If the mode is BLKmode then we may return TARGET itself.
 3987	   It turns out that in BLKmode it doesn't cause a problem,
3988 because C has no operators that could combine two different
3989 assignments into the same BLKmode object with different values
3990 with no sequence point. Will other languages need this to
3991 be more thorough?
3992
8403445a 3993 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 3994 to catch quickly any cases where the caller uses the value
8403445a
AM
3995 and fails to set WANT_VALUE.
3996
3997 If WANT_VALUE & 2 is set, this is a store into a call param on the
3998 stack, and block moves may need to be treated specially. */
bbf6f052
RK
3999
4000rtx
502b8322 4001store_expr (tree exp, rtx target, int want_value)
bbf6f052 4002{
b3694847 4003 rtx temp;
0fab64a3 4004 rtx alt_rtl = NULL_RTX;
bbf6f052 4005 int dont_return_target = 0;
e5408e52 4006 int dont_store_target = 0;
bbf6f052 4007
847311f4
AL
4008 if (VOID_TYPE_P (TREE_TYPE (exp)))
4009 {
4010 /* C++ can generate ?: expressions with a throw expression in one
4011 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4012 store the throw expression's nonexistent result. */
847311f4
AL
4013 if (want_value)
4014 abort ();
4015 expand_expr (exp, const0_rtx, VOIDmode, 0);
4016 return NULL_RTX;
4017 }
bbf6f052
RK
4018 if (TREE_CODE (exp) == COMPOUND_EXPR)
4019 {
4020 /* Perform first part of compound expression, then assign from second
4021 part. */
8403445a
AM
4022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4023 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4024 emit_queue ();
709f5be1 4025 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4026 }
4027 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4028 {
4029 /* For conditional expression, get safe form of the target. Then
4030 test the condition, doing the appropriate assignment on either
4031 side. This avoids the creation of unnecessary temporaries.
4032 For non-BLKmode, it is more efficient not to do this. */
4033
4034 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4035
4036 emit_queue ();
4037 target = protect_from_queue (target, 1);
4038
dabf8373 4039 do_pending_stack_adjust ();
bbf6f052
RK
4040 NO_DEFER_POP;
4041 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4042 start_cleanup_deferral ();
8403445a 4043 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4044 end_cleanup_deferral ();
bbf6f052
RK
4045 emit_queue ();
4046 emit_jump_insn (gen_jump (lab2));
4047 emit_barrier ();
4048 emit_label (lab1);
956d6950 4049 start_cleanup_deferral ();
8403445a 4050 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4051 end_cleanup_deferral ();
bbf6f052
RK
4052 emit_queue ();
4053 emit_label (lab2);
4054 OK_DEFER_POP;
a3a58acc 4055
8403445a 4056 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4057 }
bbf6f052 4058 else if (queued_subexp_p (target))
709f5be1
RS
4059 /* If target contains a postincrement, let's not risk
4060 using it as the place to generate the rhs. */
bbf6f052
RK
4061 {
4062 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4063 {
4064 /* Expand EXP into a new pseudo. */
4065 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4066 temp = expand_expr (exp, temp, GET_MODE (target),
4067 (want_value & 2
4068 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4069 }
4070 else
8403445a
AM
4071 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4072 (want_value & 2
4073 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4074
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
8403445a 4078 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4079 dont_return_target = 1;
bbf6f052 4080 }
8403445a
AM
4081 else if ((want_value & 1) != 0
4082 && GET_CODE (target) == MEM
4083 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4084 && GET_MODE (target) != BLKmode)
4085 /* If target is in memory and caller wants value in a register instead,
4086 arrange that. Pass TARGET as target for expand_expr so that,
4087 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4088 We know expand_expr will not use the target in that case.
4089 Don't do this if TARGET is volatile because we are supposed
4090 to write it and then read it. */
4091 {
8403445a
AM
4092 temp = expand_expr (exp, target, GET_MODE (target),
4093 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4094 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4095 {
4096 /* If TEMP is already in the desired TARGET, only copy it from
4097 memory and don't store it there again. */
4098 if (temp == target
4099 || (rtx_equal_p (temp, target)
4100 && ! side_effects_p (temp) && ! side_effects_p (target)))
4101 dont_store_target = 1;
4102 temp = copy_to_reg (temp);
4103 }
12f06d17
CH
4104 dont_return_target = 1;
4105 }
1499e0a8 4106 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4107 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4108 than the declared mode, compute the result into its declared mode
4109 and then convert to the wider mode. Our value is the computed
4110 expression. */
4111 {
b76b08ef
RK
4112 rtx inner_target = 0;
4113
5a32d038 4114 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4115 which will often result in some optimizations. Do the conversion
4116 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4117 the extend. But don't do this if the type of EXP is a subtype
4118 of something else since then the conversion might involve
4119 more than just converting modes. */
8403445a
AM
4120 if ((want_value & 1) == 0
4121 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4122 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4123 {
4124 if (TREE_UNSIGNED (TREE_TYPE (exp))
4125 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4126 exp = convert
4127 ((*lang_hooks.types.signed_or_unsigned_type)
4128 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4129
b0c48229
NB
4130 exp = convert ((*lang_hooks.types.type_for_mode)
4131 (GET_MODE (SUBREG_REG (target)),
4132 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4133 exp);
b76b08ef
RK
4134
4135 inner_target = SUBREG_REG (target);
f635a84d 4136 }
3a94c984 4137
8403445a
AM
4138 temp = expand_expr (exp, inner_target, VOIDmode,
4139 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4140
7abec5be 4141 /* If TEMP is a MEM and we want a result value, make the access
4142 now so it gets done only once. Strictly speaking, this is
4143 only necessary if the MEM is volatile, or if the address
4144 overlaps TARGET. But not performing the load twice also
4145 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4146 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4147 temp = copy_to_reg (temp);
4148
b258707c
RS
4149 /* If TEMP is a VOIDmode constant, use convert_modes to make
4150 sure that we properly convert it. */
4151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4152 {
4153 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4154 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4155 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4156 GET_MODE (target), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4158 }
b258707c 4159
1499e0a8
RK
4160 convert_move (SUBREG_REG (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4162
4163 /* If we promoted a constant, change the mode back down to match
4164 target. Otherwise, the caller might get confused by a result whose
4165 mode is larger than expected. */
4166
8403445a 4167 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4168 {
b3ca30df
JJ
4169 if (GET_MODE (temp) != VOIDmode)
4170 {
4171 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4172 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4173 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4174 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4175 }
4176 else
4177 temp = convert_modes (GET_MODE (target),
4178 GET_MODE (SUBREG_REG (target)),
4179 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4180 }
4181
8403445a 4182 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4183 }
bbf6f052
RK
4184 else
4185 {
0fab64a3
MM
4186 temp = expand_expr_real (exp, target, GET_MODE (target),
4187 (want_value & 2
4188 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4189 &alt_rtl);
766f36c7 4190 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4191 If TARGET is a volatile mem ref, either return TARGET
4192 or return a reg copied *from* TARGET; ANSI requires this.
4193
4194 Otherwise, if TEMP is not TARGET, return TEMP
4195 if it is constant (for efficiency),
4196 or if we really want the correct value. */
bbf6f052
RK
4197 if (!(target && GET_CODE (target) == REG
4198 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4199 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4200 && ! rtx_equal_p (temp, target)
8403445a 4201 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4202 dont_return_target = 1;
4203 }
4204
b258707c
RS
4205 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4206 the same as that of TARGET, adjust the constant. This is needed, for
4207 example, in case it is a CONST_DOUBLE and we want only a word-sized
4208 value. */
4209 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4210 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4211 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4212 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4213 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4214
bbf6f052 4215 /* If value was not generated in the target, store it there.
37a08a29
RK
4216 Convert the value to TARGET's type first if necessary.
4217 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4218 one or both of them are volatile memory refs, we have to distinguish
4219 two cases:
4220 - expand_expr has used TARGET. In this case, we must not generate
4221 another copy. This can be detected by TARGET being equal according
4222 to == .
4223 - expand_expr has not used TARGET - that means that the source just
4224 happens to have the same RTX form. Since temp will have been created
4225 by expand_expr, it will compare unequal according to == .
4226 We must generate a copy in this case, to reach the correct number
4227 of volatile memory references. */
bbf6f052 4228
6036acbb 4229 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4230 || (temp != target && (side_effects_p (temp)
4231 || side_effects_p (target))))
e5408e52 4232 && TREE_CODE (exp) != ERROR_MARK
a9772b60 4233 && ! dont_store_target
9c5c5f2c
MM
4234 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4235 but TARGET is not valid memory reference, TEMP will differ
4236 from TARGET although it is really the same location. */
0fab64a3 4237 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
e56fc090
HPN
4238 /* If there's nothing to copy, don't bother. Don't call expr_size
 4239	 unless necessary, because the expr_size hook of some front ends (C++)
 4240	 aborts on objects that are not supposed to be bit-copied or
4241 bit-initialized. */
4242 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4243 {
4244 target = protect_from_queue (target, 1);
4245 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4246 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4247 {
4248 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4249 if (dont_return_target)
4250 {
4251 /* In this case, we will return TEMP,
4252 so make sure it has the proper mode.
4253 But don't forget to store the value into TARGET. */
4254 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4255 emit_move_insn (target, temp);
4256 }
4257 else
4258 convert_move (target, temp, unsignedp);
4259 }
4260
4261 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 {
c24ae149
RK
4263 /* Handle copying a string constant into an array. The string
4264 constant may be shorter than the array. So copy just the string's
4265 actual length, and clear the rest. First get the size of the data
4266 type of the string, which is actually the size of the target. */
4267 rtx size = expr_size (exp);
bbf6f052 4268
e87b4f3f
RS
4269 if (GET_CODE (size) == CONST_INT
4270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4271 emit_block_move (target, temp, size,
4272 (want_value & 2
4273 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4274 else
bbf6f052 4275 {
e87b4f3f
RS
4276 /* Compute the size of the data to copy from the string. */
4277 tree copy_size
c03b7665 4278 = size_binop (MIN_EXPR,
b50d17a1 4279 make_tree (sizetype, size),
fed3cef0 4280 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4281 rtx copy_size_rtx
4282 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4283 (want_value & 2
4284 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4285 rtx label = 0;
4286
4287 /* Copy that much. */
267b28bd
SE
4288 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4289 TREE_UNSIGNED (sizetype));
8403445a
AM
4290 emit_block_move (target, temp, copy_size_rtx,
4291 (want_value & 2
4292 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4293
88f63c77
RK
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
e87b4f3f
RS
4296 if (GET_CODE (copy_size_rtx) == CONST_INT)
4297 {
c24ae149
RK
4298 size = plus_constant (size, -INTVAL (copy_size_rtx));
4299 target = adjust_address (target, BLKmode,
4300 INTVAL (copy_size_rtx));
e87b4f3f
RS
4301 }
4302 else
4303 {
fa06ab5c 4304 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4305 copy_size_rtx, NULL_RTX, 0,
4306 OPTAB_LIB_WIDEN);
e87b4f3f 4307
c24ae149
RK
4308#ifdef POINTERS_EXTEND_UNSIGNED
4309 if (GET_MODE (copy_size_rtx) != Pmode)
4310 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4311 TREE_UNSIGNED (sizetype));
c24ae149
RK
4312#endif
4313
4314 target = offset_address (target, copy_size_rtx,
4315 highest_pow2_factor (copy_size));
e87b4f3f 4316 label = gen_label_rtx ();
c5d5d461 4317 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4318 GET_MODE (size), 0, label);
e87b4f3f
RS
4319 }
4320
4321 if (size != const0_rtx)
37a08a29 4322 clear_storage (target, size);
22619c3f 4323
e87b4f3f
RS
4324 if (label)
4325 emit_label (label);
bbf6f052
RK
4326 }
4327 }
fffa9c1d
JW
4328 /* Handle calls that return values in multiple non-contiguous locations.
4329 The Irix 6 ABI has examples of this. */
4330 else if (GET_CODE (target) == PARALLEL)
4331 emit_group_load (target, temp, TREE_TYPE (exp),
4332 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4333 else if (GET_MODE (temp) == BLKmode)
4334 emit_block_move (target, temp, expr_size (exp),
4335 (want_value & 2
4336 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4337 else
b0dccb00
RH
4338 {
4339 temp = force_operand (temp, target);
4340 if (temp != target)
4341 emit_move_insn (target, temp);
4342 }
bbf6f052 4343 }
709f5be1 4344
766f36c7 4345 /* If we don't want a value, return NULL_RTX. */
8403445a 4346 if ((want_value & 1) == 0)
766f36c7
RK
4347 return NULL_RTX;
4348
4349 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4350 ??? The latter test doesn't seem to make sense. */
4351 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4352 return temp;
766f36c7
RK
4353
4354 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4355 else if ((want_value & 1) != 0
4356 && GET_MODE (target) != BLKmode
4357 && ! (GET_CODE (target) == REG
4358 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4359 return copy_to_reg (target);
3a94c984 4360
766f36c7 4361 else
709f5be1 4362 return target;
bbf6f052
RK
4363}
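/* Illustrative example (editor's sketch, not part of the original source):
   for an initialization such as

       char buf[8] = "hi";

   EXP is a BLKmode STRING_CST and TARGET is the 8-byte array, so the
   string-constant branch above copies MIN (8, TREE_STRING_LENGTH (exp))
   bytes with emit_block_move and then calls clear_storage on whatever is
   left, giving the zero padding the language requires.  */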
4364\f
40209195 4365/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
9de08200
RK
4366
4367static int
502b8322 4368is_zeros_p (tree exp)
9de08200
RK
4369{
4370 tree elt;
4371
4372 switch (TREE_CODE (exp))
4373 {
4374 case CONVERT_EXPR:
4375 case NOP_EXPR:
4376 case NON_LVALUE_EXPR:
ed239f5a 4377 case VIEW_CONVERT_EXPR:
9de08200
RK
4378 return is_zeros_p (TREE_OPERAND (exp, 0));
4379
4380 case INTEGER_CST:
05bccae2 4381 return integer_zerop (exp);
9de08200
RK
4382
4383 case COMPLEX_CST:
4384 return
4385 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4386
4387 case REAL_CST:
41c9120b 4388 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4389
69ef87e2
AH
4390 case VECTOR_CST:
4391 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4392 elt = TREE_CHAIN (elt))
4393 if (!is_zeros_p (TREE_VALUE (elt)))
4394 return 0;
4395
4396 return 1;
4397
9de08200 4398 case CONSTRUCTOR:
e1a43f73
PB
4399 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4400 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4401 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4402 if (! is_zeros_p (TREE_VALUE (elt)))
4403 return 0;
4404
4405 return 1;
3a94c984 4406
e9a25f70
JL
4407 default:
4408 return 0;
9de08200 4409 }
9de08200
RK
4410}
4411
4412/* Return 1 if EXP contains mostly (3/4) zeros. */
4413
40209195 4414int
502b8322 4415mostly_zeros_p (tree exp)
9de08200 4416{
9de08200
RK
4417 if (TREE_CODE (exp) == CONSTRUCTOR)
4418 {
e1a43f73
PB
4419 int elts = 0, zeros = 0;
4420 tree elt = CONSTRUCTOR_ELTS (exp);
4421 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4422 {
4423 /* If there are no ranges of true bits, it is all zero. */
4424 return elt == NULL_TREE;
4425 }
4426 for (; elt; elt = TREE_CHAIN (elt))
4427 {
4428 /* We do not handle the case where the index is a RANGE_EXPR,
4429 so the statistic will be somewhat inaccurate.
4430 We do make a more accurate count in store_constructor itself,
 4431	     so, since this function is only used for nested array elements,
0f41302f 4432 this should be close enough. */
e1a43f73
PB
4433 if (mostly_zeros_p (TREE_VALUE (elt)))
4434 zeros++;
4435 elts++;
4436 }
9de08200
RK
4437
4438 return 4 * zeros >= 3 * elts;
4439 }
4440
4441 return is_zeros_p (exp);
4442}
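/* Illustrative example (editor's sketch, not part of the original source):
   for a CONSTRUCTOR equivalent to the initializer { 0, 0, 0, 5 } we get
   elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4 holds, so mostly_zeros_p
   returns 1 (at least three quarters of the elements are zero).  For
   { 0, 0, 5, 5 } the test 4 * 2 >= 3 * 4 fails and the result is 0.  */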
4443\f
e1a43f73
PB
4444/* Helper function for store_constructor.
4445 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4446 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4447 CLEARED is as for store_constructor.
23cb1766 4448 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4449
4450 This provides a recursive shortcut back to store_constructor when it isn't
4451 necessary to go through store_field. This is so that we can pass through
4452 the cleared field to let store_constructor know that we may not have to
4453 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4454
4455static void
4456store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4457 HOST_WIDE_INT bitpos, enum machine_mode mode,
4458 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4459{
4460 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4461 && bitpos % BITS_PER_UNIT == 0
cc2902df 4462 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyways. */
4465 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4466 {
61cb205c
RK
4467 if (GET_CODE (target) == MEM)
4468 target
4469 = adjust_address (target,
4470 GET_MODE (target) == BLKmode
4471 || 0 != (bitpos
4472 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4473 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4474
e0339ef7 4475
04050c69 4476 /* Update the alias set, if required. */
10b76d73
RK
4477 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4478 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4479 {
4480 target = copy_rtx (target);
4481 set_mem_alias_set (target, alias_set);
4482 }
e0339ef7 4483
04050c69 4484 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4485 }
4486 else
a06ef755
RK
4487 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4488 alias_set);
e1a43f73
PB
4489}
4490
bbf6f052 4491/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4492 TARGET is either a REG or a MEM; we know it cannot conflict, since
4493 safe_from_p has been called.
b7010412
RK
4494 CLEARED is true if TARGET is known to have been zero'd.
4495 SIZE is the number of bytes of TARGET we are allowed to modify: this
4496 may not be the same as the size of EXP if we are assigning to a field
4497 which has been packed to exclude padding bits. */
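/* Illustrative example (editor's sketch, not part of the original source):
   for

       struct s { int a, b, c; } x = { 1 };

   the CONSTRUCTOR has fewer elements than the record, so the code below
   first clears the whole of TARGET (clear_storage) and then stores only
   the explicitly initialized field through store_constructor_field; the
   CLEARED flag lets nested constructors skip redundant clearing.  */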
bbf6f052
RK
4498
4499static void
502b8322 4500store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4501{
4af3895e 4502 tree type = TREE_TYPE (exp);
a5efcd63 4503#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4504 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4505#endif
4af3895e 4506
e44842fe
RK
4507 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4508 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4509 {
b3694847 4510 tree elt;
bbf6f052 4511
2c430630
RS
4512 /* If size is zero or the target is already cleared, do nothing. */
4513 if (size == 0 || cleared)
4514 cleared = 1;
04050c69 4515 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4516 else if ((TREE_CODE (type) == UNION_TYPE
4517 || TREE_CODE (type) == QUAL_UNION_TYPE)
4518 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4519 /* If the constructor is empty, clear the union. */
a59f8640 4520 {
04050c69
RK
4521 clear_storage (target, expr_size (exp));
4522 cleared = 1;
a59f8640 4523 }
4af3895e
JVA
4524
4525 /* If we are building a static constructor into a register,
4526 set the initial value as zero so we can fold the value into
67225c15
RK
4527 a constant. But if more than one register is involved,
4528 this probably loses. */
2c430630 4529 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4530 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4531 {
04050c69 4532 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4533 cleared = 1;
4534 }
4535
4536 /* If the constructor has fewer fields than the structure
4537 or if we are initializing the structure to mostly zeros,
0d97bf4c 4538 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4539 register whose mode size isn't equal to SIZE since clear_storage
4540 can't handle this case. */
2c430630
RS
4541 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4542 || mostly_zeros_p (exp))
fcf1b822 4543 && (GET_CODE (target) != REG
04050c69
RK
4544 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4545 == size)))
9de08200 4546 {
337f4314
RK
4547 rtx xtarget = target;
4548
4549 if (readonly_fields_p (type))
4550 {
4551 xtarget = copy_rtx (xtarget);
4552 RTX_UNCHANGING_P (xtarget) = 1;
4553 }
4554
4555 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4556 cleared = 1;
4557 }
04050c69
RK
4558
4559 if (! cleared)
38a448ca 4560 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4561
4562 /* Store each element of the constructor into
4563 the corresponding field of TARGET. */
4564
4565 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4566 {
b3694847 4567 tree field = TREE_PURPOSE (elt);
34c73909 4568 tree value = TREE_VALUE (elt);
b3694847 4569 enum machine_mode mode;
770ae6cc
RK
4570 HOST_WIDE_INT bitsize;
4571 HOST_WIDE_INT bitpos = 0;
770ae6cc 4572 tree offset;
b50d17a1 4573 rtx to_rtx = target;
bbf6f052 4574
f32fd778
RS
4575 /* Just ignore missing fields.
4576 We cleared the whole structure, above,
4577 if any fields are missing. */
4578 if (field == 0)
4579 continue;
4580
8b6000fc 4581 if (cleared && is_zeros_p (value))
e1a43f73 4582 continue;
9de08200 4583
770ae6cc
RK
4584 if (host_integerp (DECL_SIZE (field), 1))
4585 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4586 else
4587 bitsize = -1;
4588
bbf6f052
RK
4589 mode = DECL_MODE (field);
4590 if (DECL_BIT_FIELD (field))
4591 mode = VOIDmode;
4592
770ae6cc
RK
4593 offset = DECL_FIELD_OFFSET (field);
4594 if (host_integerp (offset, 0)
4595 && host_integerp (bit_position (field), 0))
4596 {
4597 bitpos = int_bit_position (field);
4598 offset = 0;
4599 }
b50d17a1 4600 else
770ae6cc 4601 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4602
b50d17a1
RK
4603 if (offset)
4604 {
4605 rtx offset_rtx;
4606
7a6cdb44 4607 if (CONTAINS_PLACEHOLDER_P (offset))
7fa96708 4608 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4609 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4610
b50d17a1
RK
4611 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4612 if (GET_CODE (to_rtx) != MEM)
4613 abort ();
4614
bd070e1a 4615#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4616 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4617 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4618#else
4619 if (GET_MODE (offset_rtx) != ptr_mode)
4620 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4621#endif
bd070e1a 4622
0d4903b8
RK
4623 to_rtx = offset_address (to_rtx, offset_rtx,
4624 highest_pow2_factor (offset));
b50d17a1 4625 }
c5c76735 4626
4e44c1ef 4627 if (TREE_READONLY (field))
cf04eb80 4628 {
9151b3bf 4629 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4630 to_rtx = copy_rtx (to_rtx);
4631
cf04eb80
RK
4632 RTX_UNCHANGING_P (to_rtx) = 1;
4633 }
4634
34c73909
R
4635#ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
770ae6cc 4640 if (GET_CODE (target) == REG
34c73909
R
4641 && bitsize < BITS_PER_WORD
4642 && bitpos % BITS_PER_WORD == 0
4643 && GET_MODE_CLASS (mode) == MODE_INT
4644 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4645 && exp_size >= 0
4646 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4647 {
4648 tree type = TREE_TYPE (value);
04050c69 4649
34c73909
R
4650 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4651 {
b0c48229
NB
4652 type = (*lang_hooks.types.type_for_size)
4653 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4654 value = convert (type, value);
4655 }
04050c69 4656
34c73909
R
4657 if (BYTES_BIG_ENDIAN)
4658 value
4659 = fold (build (LSHIFT_EXPR, type, value,
4660 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4661 bitsize = BITS_PER_WORD;
4662 mode = word_mode;
4663 }
4664#endif
10b76d73
RK
4665
4666 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4667 && DECL_NONADDRESSABLE_P (field))
4668 {
4669 to_rtx = copy_rtx (to_rtx);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4671 }
4672
c5c76735 4673 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4674 value, type, cleared,
10b76d73 4675 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4676 }
4677 }
e6834654
SS
4678 else if (TREE_CODE (type) == ARRAY_TYPE
4679 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4680 {
b3694847
SS
4681 tree elt;
4682 int i;
e1a43f73 4683 int need_to_clear;
4af3895e 4684 tree domain = TYPE_DOMAIN (type);
4af3895e 4685 tree elttype = TREE_TYPE (type);
e6834654 4686 int const_bounds_p;
ae0ed63a
JM
4687 HOST_WIDE_INT minelt = 0;
4688 HOST_WIDE_INT maxelt = 0;
997404de
JH
4689 int icode = 0;
4690 rtx *vector = NULL;
4691 int elt_size = 0;
4692 unsigned n_elts = 0;
85f3d674 4693
e6834654
SS
4694 /* Vectors are like arrays, but the domain is stored via an array
4695 type indirectly. */
4696 if (TREE_CODE (type) == VECTOR_TYPE)
4697 {
4698 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4699 the same field as TYPE_DOMAIN, we are not guaranteed that
4700 it always will. */
4701 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4702 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
997404de
JH
4703 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4704 {
4705 enum machine_mode mode = GET_MODE (target);
4706
4707 icode = (int) vec_init_optab->handlers[mode].insn_code;
4708 if (icode != CODE_FOR_nothing)
4709 {
4710 unsigned int i;
4711
4712 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4713 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4714 vector = alloca (n_elts);
4715 for (i = 0; i < n_elts; i++)
4716 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4717 }
4718 }
e6834654
SS
4719 }
4720
4721 const_bounds_p = (TYPE_MIN_VALUE (domain)
4722 && TYPE_MAX_VALUE (domain)
4723 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4725
85f3d674
RK
4726 /* If we have constant bounds for the range of the type, get them. */
4727 if (const_bounds_p)
4728 {
4729 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4730 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4731 }
bbf6f052 4732
e1a43f73 4733 /* If the constructor has fewer elements than the array,
38e01259 4734 clear the whole array first. Similarly if this is
e1a43f73
PB
4735 static constructor of a non-BLKmode object. */
4736 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4737 need_to_clear = 1;
4738 else
4739 {
4740 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4741 need_to_clear = ! const_bounds_p;
4742
e1a43f73
PB
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4747 elt != NULL_TREE && ! need_to_clear;
df0faff1 4748 elt = TREE_CHAIN (elt))
e1a43f73
PB
4749 {
4750 tree index = TREE_PURPOSE (elt);
4751 HOST_WIDE_INT this_node_count;
19caa751 4752
e1a43f73
PB
4753 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4754 {
4755 tree lo_index = TREE_OPERAND (index, 0);
4756 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4757
19caa751
RK
4758 if (! host_integerp (lo_index, 1)
4759 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4760 {
4761 need_to_clear = 1;
4762 break;
4763 }
19caa751
RK
4764
4765 this_node_count = (tree_low_cst (hi_index, 1)
4766 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4767 }
4768 else
4769 this_node_count = 1;
85f3d674 4770
e1a43f73
PB
4771 count += this_node_count;
4772 if (mostly_zeros_p (TREE_VALUE (elt)))
4773 zero_count += this_node_count;
4774 }
85f3d674 4775
8e958f70 4776 /* Clear the entire array first if there are any missing elements,
0f41302f 4777 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4778 if (! need_to_clear
4779 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4780 need_to_clear = 1;
4781 }
85f3d674 4782
997404de 4783 if (need_to_clear && size > 0 && !vector)
9de08200
RK
4784 {
4785 if (! cleared)
725e58b1
RK
4786 {
4787 if (REG_P (target))
4788 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4789 else
4790 clear_storage (target, GEN_INT (size));
4791 }
9de08200
RK
4792 cleared = 1;
4793 }
df4556a3 4794 else if (REG_P (target))
bbf6f052 4795 /* Inform later passes that the old value is dead. */
38a448ca 4796 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4797
4798 /* Store each element of the constructor into
4799 the corresponding element of TARGET, determined
4800 by counting the elements. */
4801 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4802 elt;
4803 elt = TREE_CHAIN (elt), i++)
4804 {
b3694847 4805 enum machine_mode mode;
19caa751
RK
4806 HOST_WIDE_INT bitsize;
4807 HOST_WIDE_INT bitpos;
bbf6f052 4808 int unsignedp;
e1a43f73 4809 tree value = TREE_VALUE (elt);
03dc44a6
RS
4810 tree index = TREE_PURPOSE (elt);
4811 rtx xtarget = target;
bbf6f052 4812
e1a43f73
PB
4813 if (cleared && is_zeros_p (value))
4814 continue;
9de08200 4815
bbf6f052 4816 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4817 mode = TYPE_MODE (elttype);
4818 if (mode == BLKmode)
19caa751
RK
4819 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4820 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4821 : -1);
14a774a9
RK
4822 else
4823 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4824
e1a43f73
PB
4825 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4826 {
4827 tree lo_index = TREE_OPERAND (index, 0);
4828 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4829 rtx index_r, pos_rtx, loop_end;
e1a43f73 4830 struct nesting *loop;
05c0b405
PB
4831 HOST_WIDE_INT lo, hi, count;
4832 tree position;
e1a43f73 4833
997404de
JH
4834 if (vector)
4835 abort ();
4836
0f41302f 4837 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4838 if (const_bounds_p
4839 && host_integerp (lo_index, 0)
19caa751
RK
4840 && host_integerp (hi_index, 0)
4841 && (lo = tree_low_cst (lo_index, 0),
4842 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4843 count = hi - lo + 1,
4844 (GET_CODE (target) != MEM
4845 || count <= 2
19caa751
RK
4846 || (host_integerp (TYPE_SIZE (elttype), 1)
4847 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4848 <= 40 * 8)))))
e1a43f73 4849 {
05c0b405
PB
4850 lo -= minelt; hi -= minelt;
4851 for (; lo <= hi; lo++)
e1a43f73 4852 {
19caa751 4853 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4854
4855 if (GET_CODE (target) == MEM
4856 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4857 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4858 && TYPE_NONALIASED_COMPONENT (type))
4859 {
4860 target = copy_rtx (target);
4861 MEM_KEEP_ALIAS_SET_P (target) = 1;
4862 }
4863
23cb1766 4864 store_constructor_field
04050c69
RK
4865 (target, bitsize, bitpos, mode, value, type, cleared,
4866 get_alias_set (elttype));
e1a43f73
PB
4867 }
4868 }
4869 else
4870 {
4977bab6 4871 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
4872 loop_end = gen_label_rtx ();
4873
4874 unsignedp = TREE_UNSIGNED (domain);
4875
4876 index = build_decl (VAR_DECL, NULL_TREE, domain);
4877
19e7881c 4878 index_r
e1a43f73
PB
4879 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4880 &unsignedp, 0));
19e7881c 4881 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4882 if (TREE_CODE (value) == SAVE_EXPR
4883 && SAVE_EXPR_RTL (value) == 0)
4884 {
0f41302f
MS
4885 /* Make sure value gets expanded once before the
4886 loop. */
e1a43f73
PB
4887 expand_expr (value, const0_rtx, VOIDmode, 0);
4888 emit_queue ();
4889 }
4890 store_expr (lo_index, index_r, 0);
4891 loop = expand_start_loop (0);
4892
0f41302f 4893 /* Assign value to element index. */
fed3cef0
RK
4894 position
4895 = convert (ssizetype,
4896 fold (build (MINUS_EXPR, TREE_TYPE (index),
4897 index, TYPE_MIN_VALUE (domain))));
4898 position = size_binop (MULT_EXPR, position,
4899 convert (ssizetype,
4900 TYPE_SIZE_UNIT (elttype)));
4901
e1a43f73 4902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4903 xtarget = offset_address (target, pos_rtx,
4904 highest_pow2_factor (position));
4905 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4906 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4907 store_constructor (value, xtarget, cleared,
b7010412 4908 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4909 else
4910 store_expr (value, xtarget, 0);
4911
4912 expand_exit_loop_if_false (loop,
4913 build (LT_EXPR, integer_type_node,
4914 index, hi_index));
4915
4916 expand_increment (build (PREINCREMENT_EXPR,
4917 TREE_TYPE (index),
7b8b9722 4918 index, integer_one_node), 0, 0);
e1a43f73
PB
4919 expand_end_loop ();
4920 emit_label (loop_end);
e1a43f73
PB
4921 }
4922 }
19caa751
RK
4923 else if ((index != 0 && ! host_integerp (index, 0))
4924 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4925 {
03dc44a6
RS
4926 tree position;
4927
997404de
JH
4928 if (vector)
4929 abort ();
4930
5b6c44ff 4931 if (index == 0)
fed3cef0 4932 index = ssize_int (1);
5b6c44ff 4933
e1a43f73 4934 if (minelt)
fed3cef0
RK
4935 index = convert (ssizetype,
4936 fold (build (MINUS_EXPR, index,
4937 TYPE_MIN_VALUE (domain))));
19caa751 4938
fed3cef0
RK
4939 position = size_binop (MULT_EXPR, index,
4940 convert (ssizetype,
4941 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4942 xtarget = offset_address (target,
4943 expand_expr (position, 0, VOIDmode, 0),
4944 highest_pow2_factor (position));
4945 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4946 store_expr (value, xtarget, 0);
03dc44a6 4947 }
997404de
JH
4948 else if (vector)
4949 {
4950 int pos;
4951
4952 if (index != 0)
4953 pos = tree_low_cst (index, 0) - minelt;
4954 else
4955 pos = i;
4956 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4957 }
03dc44a6
RS
4958 else
4959 {
4960 if (index != 0)
19caa751
RK
4961 bitpos = ((tree_low_cst (index, 0) - minelt)
4962 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4963 else
19caa751
RK
4964 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4965
10b76d73 4966 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4967 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4968 && TYPE_NONALIASED_COMPONENT (type))
4969 {
4970 target = copy_rtx (target);
4971 MEM_KEEP_ALIAS_SET_P (target) = 1;
4972 }
9b9bd3b2
JH
4973 store_constructor_field (target, bitsize, bitpos, mode, value,
4974 type, cleared, get_alias_set (elttype));
03dc44a6 4975 }
bbf6f052 4976 }
997404de
JH
4977 if (vector)
4978 {
4979 emit_insn (GEN_FCN (icode) (target,
4980 gen_rtx_PARALLEL (GET_MODE (target),
4981 gen_rtvec_v (n_elts, vector))));
4982 }
bbf6f052 4983 }
19caa751 4984
3a94c984 4985 /* Set constructor assignments. */
071a6595
PB
4986 else if (TREE_CODE (type) == SET_TYPE)
4987 {
e1a43f73 4988 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4989 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4990 tree domain = TYPE_DOMAIN (type);
4991 tree domain_min, domain_max, bitlength;
4992
9faa82d8 4993 /* The default implementation strategy is to extract the constant
071a6595
PB
4994 parts of the constructor, use that to initialize the target,
4995 and then "or" in whatever non-constant ranges we need in addition.
4996
4997 If a large set is all zero or all ones, it is
4998 probably better to set it using memset (if available) or bzero.
4999 Also, if a large set has just a single range, it may also be
5000 better to first clear all the first clear the set (using
0f41302f 5001 bzero/memset), and set the bits we want. */
3a94c984 5002
0f41302f 5003 /* Check for all zeros. */
9376fcd6 5004 if (elt == NULL_TREE && size > 0)
071a6595 5005 {
e1a43f73 5006 if (!cleared)
8ac61af7 5007 clear_storage (target, GEN_INT (size));
071a6595
PB
5008 return;
5009 }
5010
071a6595
PB
5011 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5012 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5013 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5014 size_diffop (domain_max, domain_min),
5015 ssize_int (1));
071a6595 5016
19caa751 5017 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5018
5019 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5020 are "complicated" (more than one range), initialize (the
3a94c984 5021 constant parts) by copying from a constant. */
e1a43f73
PB
5022 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5023 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5024 {
19caa751 5025 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5026 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 5027 char *bit_buffer = alloca (nbits);
b4ee5a72 5028 HOST_WIDE_INT word = 0;
19caa751
RK
5029 unsigned int bit_pos = 0;
5030 unsigned int ibit = 0;
5031 unsigned int offset = 0; /* In bytes from beginning of set. */
5032
e1a43f73 5033 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5034 for (;;)
071a6595 5035 {
b4ee5a72
PB
5036 if (bit_buffer[ibit])
5037 {
b09f3348 5038 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5039 word |= (1 << (set_word_size - 1 - bit_pos));
5040 else
5041 word |= 1 << bit_pos;
5042 }
19caa751 5043
b4ee5a72
PB
5044 bit_pos++; ibit++;
5045 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5046 {
e1a43f73
PB
5047 if (word != 0 || ! cleared)
5048 {
5049 rtx datum = GEN_INT (word);
5050 rtx to_rtx;
19caa751 5051
0f41302f
MS
5052 /* The assumption here is that it is safe to use
5053 XEXP if the set is multi-word, but not if
5054 it's single-word. */
e1a43f73 5055 if (GET_CODE (target) == MEM)
f4ef873c 5056 to_rtx = adjust_address (target, mode, offset);
3a94c984 5057 else if (offset == 0)
e1a43f73
PB
5058 to_rtx = target;
5059 else
5060 abort ();
5061 emit_move_insn (to_rtx, datum);
5062 }
19caa751 5063
b4ee5a72
PB
5064 if (ibit == nbits)
5065 break;
5066 word = 0;
5067 bit_pos = 0;
5068 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5069 }
5070 }
071a6595 5071 }
e1a43f73 5072 else if (!cleared)
19caa751
RK
5073 /* Don't bother clearing storage if the set is all ones. */
5074 if (TREE_CHAIN (elt) != NULL_TREE
5075 || (TREE_PURPOSE (elt) == NULL_TREE
5076 ? nbits != 1
5077 : ( ! host_integerp (TREE_VALUE (elt), 0)
5078 || ! host_integerp (TREE_PURPOSE (elt), 0)
5079 || (tree_low_cst (TREE_VALUE (elt), 0)
5080 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5081 != (HOST_WIDE_INT) nbits))))
8ac61af7 5082 clear_storage (target, expr_size (exp));
3a94c984 5083
e1a43f73 5084 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5085 {
3a94c984 5086 /* Start of range of element or NULL. */
071a6595 5087 tree startbit = TREE_PURPOSE (elt);
3a94c984 5088 /* End of range of element, or element value. */
071a6595
PB
5089 tree endbit = TREE_VALUE (elt);
5090 HOST_WIDE_INT startb, endb;
19caa751 5091 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5092
5093 bitlength_rtx = expand_expr (bitlength,
19caa751 5094 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5095
3a94c984 5096 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5097 if (startbit == NULL_TREE)
5098 {
5099 startbit = save_expr (endbit);
5100 endbit = startbit;
5101 }
19caa751 5102
071a6595
PB
5103 startbit = convert (sizetype, startbit);
5104 endbit = convert (sizetype, endbit);
5105 if (! integer_zerop (domain_min))
5106 {
5107 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5108 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5109 }
3a94c984 5110 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5111 EXPAND_CONST_ADDRESS);
3a94c984 5112 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5113 EXPAND_CONST_ADDRESS);
5114
5115 if (REG_P (target))
5116 {
1da68f56
RK
5117 targetx
5118 = assign_temp
b0c48229
NB
5119 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5120 (GET_MODE (target), 0),
1da68f56
RK
5121 TYPE_QUAL_CONST)),
5122 0, 1, 1);
071a6595
PB
5123 emit_move_insn (targetx, target);
5124 }
19caa751 5125
071a6595
PB
5126 else if (GET_CODE (target) == MEM)
5127 targetx = target;
5128 else
5129 abort ();
5130
4ca79136
RH
5131 /* Optimization: If startbit and endbit are constants divisible
5132 by BITS_PER_UNIT, call memset instead. */
5133 if (TARGET_MEM_FUNCTIONS
5134 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5135 && TREE_CODE (endbit) == INTEGER_CST
5136 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5137 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5138 {
ebb1b59a 5139 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5140 VOIDmode, 3,
e1a43f73
PB
5141 plus_constant (XEXP (targetx, 0),
5142 startb / BITS_PER_UNIT),
071a6595 5143 Pmode,
3b6f75e2 5144 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5145 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5146 TYPE_MODE (sizetype));
071a6595
PB
5147 }
5148 else
68d28100
RH
5149 emit_library_call (setbits_libfunc, LCT_NORMAL,
5150 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5151 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5152 startbit_rtx, TYPE_MODE (sizetype),
5153 endbit_rtx, TYPE_MODE (sizetype));
5154
071a6595
PB
5155 if (REG_P (target))
5156 emit_move_insn (target, targetx);
5157 }
5158 }
bbf6f052
RK
5159
5160 else
5161 abort ();
5162}
5163
5164/* Store the value of EXP (an expression tree)
5165 into a subfield of TARGET which has mode MODE and occupies
5166 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5167 If MODE is VOIDmode, it means that we are storing into a bit-field.
5168
5169 If VALUE_MODE is VOIDmode, return nothing in particular.
5170 UNSIGNEDP is not used in this case.
5171
5172 Otherwise, return an rtx for the value stored. This rtx
5173 has mode VALUE_MODE if that is convenient to do.
5174 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5175
a06ef755 5176 TYPE is the type of the underlying object,
ece32014
MM
5177
5178 ALIAS_SET is the alias set for the destination. This value will
5179 (in general) be different from that for TARGET, since TARGET is a
5180 reference to the containing structure. */
bbf6f052
RK
5181
5182static rtx
502b8322
AJ
5183store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5184 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5185 int unsignedp, tree type, int alias_set)
bbf6f052 5186{
906c4e36 5187 HOST_WIDE_INT width_mask = 0;
bbf6f052 5188
e9a25f70
JL
5189 if (TREE_CODE (exp) == ERROR_MARK)
5190 return const0_rtx;
5191
2be6a7e9
RK
5192 /* If we have nothing to store, do nothing unless the expression has
5193 side-effects. */
5194 if (bitsize == 0)
5195 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5196 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5197 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5198
5199 /* If we are storing into an unaligned field of an aligned union that is
5200 in a register, we may have the mode of TARGET being an integer mode but
5201 MODE == BLKmode. In that case, get an aligned object whose size and
5202 alignment are the same as TARGET and store TARGET into it (we can avoid
5203 the store if the field being stored is the entire width of TARGET). Then
5204 call ourselves recursively to store the field into a BLKmode version of
5205 that object. Finally, load from the object into TARGET. This is not
5206 very efficient in general, but should only be slightly more expensive
5207 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5208 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5209 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5210
5211 if (mode == BLKmode
5212 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5213 {
85a43a2f 5214 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5215 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5216
8752c357 5217 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5218 emit_move_insn (object, target);
5219
a06ef755
RK
5220 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5221 alias_set);
bbf6f052
RK
5222
5223 emit_move_insn (target, object);
5224
a06ef755 5225 /* We want to return the BLKmode version of the data. */
46093b97 5226 return blk_object;
bbf6f052 5227 }
c3b247b4
JM
5228
5229 if (GET_CODE (target) == CONCAT)
5230 {
5231 /* We're storing into a struct containing a single __complex. */
5232
5233 if (bitpos != 0)
5234 abort ();
5235 return store_expr (exp, target, 0);
5236 }
bbf6f052
RK
5237
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5241
4fa52007 5242 if (mode == VOIDmode
6ab06cbb
JW
5243 || (mode != BLKmode && ! direct_store[(int) mode]
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5246 || GET_CODE (target) == REG
c980ac49 5247 || GET_CODE (target) == SUBREG
ccc98036
RS
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
15b19a7d 5250 || (mode != BLKmode
9e5f281f
OH
5251 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5252 || bitpos % GET_MODE_ALIGNMENT (mode))
5253 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5254 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5257 operations. */
05bccae2
RK
5258 || (bitsize >= 0
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5261 {
906c4e36 5262 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5263
ef19912d
RK
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is
5267 big-endian machine, we want the upper BITSIZE bits. */
5268 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5269 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5270 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5271 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5273 - bitsize),
c1853da7 5274 NULL_RTX, 1);
ef19912d 5275
bbd6cf73
RK
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5277 MODE. */
5278 if (mode != VOIDmode && mode != BLKmode
5279 && mode != TYPE_MODE (TREE_TYPE (exp)))
5280 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5281
a281e72d
RK
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5286 {
5287 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5288 || bitpos % BITS_PER_UNIT != 0)
5289 abort ();
5290
f4ef873c 5291 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5292 emit_block_move (target, temp,
a06ef755 5293 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5294 / BITS_PER_UNIT),
5295 BLOCK_OP_NORMAL);
a281e72d
RK
5296
5297 return value_mode == VOIDmode ? const0_rtx : target;
5298 }
5299
bbf6f052 5300 /* Store the value in the bitfield. */
a06ef755
RK
5301 store_bit_field (target, bitsize, bitpos, mode, temp,
5302 int_size_in_bytes (type));
5303
bbf6f052
RK
5304 if (value_mode != VOIDmode)
5305 {
04050c69
RK
5306 /* The caller wants an rtx for the value.
5307 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5308 if (width_mask != 0
5309 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5310 {
9074de27 5311 tree count;
5c4d7cfb 5312 enum machine_mode tmode;
86a2c12a 5313
5c4d7cfb 5314 tmode = GET_MODE (temp);
86a2c12a
RS
5315 if (tmode == VOIDmode)
5316 tmode = value_mode;
22273300
JJ
5317
5318 if (unsignedp)
5319 return expand_and (tmode, temp,
2496c7bd 5320 gen_int_mode (width_mask, tmode),
22273300
JJ
5321 NULL_RTX);
5322
5c4d7cfb
RS
5323 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5324 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5325 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5326 }
04050c69 5327
bbf6f052 5328 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5329 NULL_RTX, value_mode, VOIDmode,
a06ef755 5330 int_size_in_bytes (type));
bbf6f052
RK
5331 }
5332 return const0_rtx;
5333 }
5334 else
5335 {
5336 rtx addr = XEXP (target, 0);
a06ef755 5337 rtx to_rtx = target;
bbf6f052
RK
5338
5339 /* If a value is wanted, it must be the lhs;
5340 so make the address stable for multiple use. */
5341
5342 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5343 && ! CONSTANT_ADDRESS_P (addr)
5344 /* A frame-pointer reference is already stable. */
5345 && ! (GET_CODE (addr) == PLUS
5346 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5347 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5348 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5349 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5350
5351 /* Now build a reference to just the desired component. */
5352
a06ef755
RK
5353 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5354
5355 if (to_rtx == target)
5356 to_rtx = copy_rtx (to_rtx);
792760b9 5357
c6df88cb 5358 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5359 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5360 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5361
5362 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5363 }
5364}
5365\f
5366/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5369
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5376
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5379
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5382 is redundant.
5383
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5386 this case, but the address of the object can be found. */
bbf6f052
RK
5387
5388tree
502b8322
AJ
5389get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5390 HOST_WIDE_INT *pbitpos, tree *poffset,
5391 enum machine_mode *pmode, int *punsignedp,
5392 int *pvolatilep)
bbf6f052
RK
5393{
5394 tree size_tree = 0;
5395 enum machine_mode mode = VOIDmode;
fed3cef0 5396 tree offset = size_zero_node;
770ae6cc 5397 tree bit_offset = bitsize_zero_node;
738cc472 5398 tree placeholder_ptr = 0;
770ae6cc 5399 tree tem;
bbf6f052 5400
770ae6cc
RK
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
bbf6f052
RK
5403 if (TREE_CODE (exp) == COMPONENT_REF)
5404 {
5405 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5407 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5408
bbf6f052
RK
5409 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5410 }
5411 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5412 {
5413 size_tree = TREE_OPERAND (exp, 1);
5414 *punsignedp = TREE_UNSIGNED (exp);
5415 }
5416 else
5417 {
5418 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5419 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5420
ab87f8c8
JL
5421 if (mode == BLKmode)
5422 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5423 else
5424 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5425 }
3a94c984 5426
770ae6cc 5427 if (size_tree != 0)
bbf6f052 5428 {
770ae6cc 5429 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5430 mode = BLKmode, *pbitsize = -1;
5431 else
770ae6cc 5432 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5433 }
5434
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
bbf6f052
RK
5437 while (1)
5438 {
770ae6cc
RK
5439 if (TREE_CODE (exp) == BIT_FIELD_REF)
5440 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5441 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5442 {
770ae6cc
RK
5443 tree field = TREE_OPERAND (exp, 1);
5444 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5445
e7f3c83f
RK
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
770ae6cc 5449 if (this_offset == 0)
e7f3c83f 5450 break;
7a6cdb44 5451 else if (CONTAINS_PLACEHOLDER_P (this_offset))
770ae6cc 5452 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5453
7156dead 5454 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5455 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5456 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5457
a06ef755 5458 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5459 }
7156dead 5460
b4e3fabb
RK
5461 else if (TREE_CODE (exp) == ARRAY_REF
5462 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5463 {
742920c7 5464 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5465 tree array = TREE_OPERAND (exp, 0);
5466 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5467 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5468 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5469
770ae6cc
RK
5470 /* We assume all arrays have sizes that are a multiple of a byte.
5471 First subtract the lower bound, if any, in the type of the
5472 index, then convert to sizetype and multiply by the size of the
5473 array element. */
5474 if (low_bound != 0 && ! integer_zerop (low_bound))
5475 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5476 index, low_bound));
f8dac6eb 5477
7156dead
RK
5478 /* If the index has a self-referential type, pass it to a
5479 WITH_RECORD_EXPR; if the component size is, pass our
5480 component to one. */
7a6cdb44 5481 if (CONTAINS_PLACEHOLDER_P (index))
770ae6cc 5482 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7a6cdb44 5483 if (CONTAINS_PLACEHOLDER_P (unit_size))
b4e3fabb 5484 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5485
770ae6cc
RK
5486 offset = size_binop (PLUS_EXPR, offset,
5487 size_binop (MULT_EXPR,
5488 convert (sizetype, index),
7156dead 5489 unit_size));
bbf6f052 5490 }
7156dead 5491
738cc472
RK
5492 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5493 {
70072ed9
RK
5494 tree new = find_placeholder (exp, &placeholder_ptr);
5495
5496 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5497 We might have been called from tree optimization where we
5498 haven't set up an object yet. */
5499 if (new == 0)
5500 break;
5501 else
5502 exp = new;
5503
738cc472
RK
5504 continue;
5505 }
c1853da7
RK
5506
5507 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5508 conversions that don't change the mode, and all view conversions
5509 except those that need to "step up" the alignment. */
bbf6f052 5510 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5511 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5512 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5513 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5514 && STRICT_ALIGNMENT
5515 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5516 < BIGGEST_ALIGNMENT)
5517 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5518 || TYPE_ALIGN_OK (TREE_TYPE
5519 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5520 && ! ((TREE_CODE (exp) == NOP_EXPR
5521 || TREE_CODE (exp) == CONVERT_EXPR)
5522 && (TYPE_MODE (TREE_TYPE (exp))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5524 break;
7bb0943f
RS
5525
5526 /* If any reference in the chain is volatile, the effect is volatile. */
5527 if (TREE_THIS_VOLATILE (exp))
5528 *pvolatilep = 1;
839c4796 5529
bbf6f052
RK
5530 exp = TREE_OPERAND (exp, 0);
5531 }
5532
770ae6cc
RK
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset, 0)
5536 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5537 bitsize_unit_node))
5538 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5539 && host_integerp (tem, 0))
5540 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5541 else
5542 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5543
bbf6f052 5544 *pmode = mode;
bbf6f052
RK
5545 return exp;
5546}
921b3427 5547
ed239f5a
RK
5548/* Return 1 if T is an expression that get_inner_reference handles. */
5549
5550int
502b8322 5551handled_component_p (tree t)
ed239f5a
RK
5552{
5553 switch (TREE_CODE (t))
5554 {
5555 case BIT_FIELD_REF:
5556 case COMPONENT_REF:
5557 case ARRAY_REF:
5558 case ARRAY_RANGE_REF:
5559 case NON_LVALUE_EXPR:
5560 case VIEW_CONVERT_EXPR:
5561 return 1;
5562
1a8c4ca6
EB
5563 /* ??? Sure they are handled, but get_inner_reference may return
5564 a different PBITSIZE, depending upon whether the expression is
5565 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5566 case NOP_EXPR:
5567 case CONVERT_EXPR:
5568 return (TYPE_MODE (TREE_TYPE (t))
5569 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5570
5571 default:
5572 return 0;
5573 }
5574}
bbf6f052 5575\f
3fe44edd
RK
5576/* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
c45a13a6
RK
5580
5581 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5582
5583rtx
502b8322 5584force_operand (rtx value, rtx target)
bbf6f052 5585{
8a28dbcc 5586 rtx op1, op2;
bbf6f052 5587 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5588 rtx subtarget = get_subtarget (target);
8a28dbcc 5589 enum rtx_code code = GET_CODE (value);
bbf6f052 5590
50654f6c
ZD
5591 /* Check for subreg applied to an expression produced by loop optimizer. */
5592 if (code == SUBREG
5593 && GET_CODE (SUBREG_REG (value)) != REG
5594 && GET_CODE (SUBREG_REG (value)) != MEM)
5595 {
5596 value = simplify_gen_subreg (GET_MODE (value),
5597 force_reg (GET_MODE (SUBREG_REG (value)),
5598 force_operand (SUBREG_REG (value),
5599 NULL_RTX)),
5600 GET_MODE (SUBREG_REG (value)),
5601 SUBREG_BYTE (value));
5602 code = GET_CODE (value);
5603 }
5604
8b015896 5605 /* Check for a PIC address load. */
8a28dbcc 5606 if ((code == PLUS || code == MINUS)
8b015896
RH
5607 && XEXP (value, 0) == pic_offset_table_rtx
5608 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5609 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5610 || GET_CODE (XEXP (value, 1)) == CONST))
5611 {
5612 if (!subtarget)
5613 subtarget = gen_reg_rtx (GET_MODE (value));
5614 emit_move_insn (subtarget, value);
5615 return subtarget;
5616 }
5617
8a28dbcc 5618 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5619 {
8a28dbcc
JH
5620 if (!target)
5621 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5622 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5623 code == ZERO_EXTEND);
5624 return target;
bbf6f052
RK
5625 }
5626
8a28dbcc 5627 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5628 {
5629 op2 = XEXP (value, 1);
8a28dbcc 5630 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5631 subtarget = 0;
8a28dbcc 5632 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5633 {
8a28dbcc 5634 code = PLUS;
bbf6f052
RK
5635 op2 = negate_rtx (GET_MODE (value), op2);
5636 }
5637
5638 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5639 operand a PLUS of a virtual register and something else. In that
5640 case, we want to emit the sum of the virtual register and the
5641 constant first and then add the other value. This allows virtual
5642 register instantiation to simply modify the constant rather than
5643 creating another one around this addition. */
5644 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5645 && GET_CODE (XEXP (value, 0)) == PLUS
5646 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5647 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5648 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5649 {
8a28dbcc
JH
5650 rtx temp = expand_simple_binop (GET_MODE (value), code,
5651 XEXP (XEXP (value, 0), 0), op2,
5652 subtarget, 0, OPTAB_LIB_WIDEN);
5653 return expand_simple_binop (GET_MODE (value), code, temp,
5654 force_operand (XEXP (XEXP (value,
5655 0), 1), 0),
5656 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5657 }
3a94c984 5658
8a28dbcc
JH
5659 op1 = force_operand (XEXP (value, 0), subtarget);
5660 op2 = force_operand (op2, NULL_RTX);
5661 switch (code)
5662 {
5663 case MULT:
5664 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5665 case DIV:
5666 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5667 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5668 target, 1, OPTAB_LIB_WIDEN);
5669 else
5670 return expand_divmod (0,
5671 FLOAT_MODE_P (GET_MODE (value))
5672 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5673 GET_MODE (value), op1, op2, target, 0);
5674 break;
5675 case MOD:
5676 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5677 target, 0);
5678 break;
5679 case UDIV:
5680 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5681 target, 1);
5682 break;
5683 case UMOD:
5684 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5685 target, 1);
5686 break;
5687 case ASHIFTRT:
5688 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5689 target, 0, OPTAB_LIB_WIDEN);
5690 break;
5691 default:
5692 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5693 target, 1, OPTAB_LIB_WIDEN);
5694 }
5695 }
5696 if (GET_RTX_CLASS (code) == '1')
5697 {
5698 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5699 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5700 }
34e81b5a
RK
5701
5702#ifdef INSN_SCHEDULING
5703 /* On machines that have insn scheduling, we want all memory reference to be
5704 explicit, so we need to deal with such paradoxical SUBREGs. */
5705 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5706 && (GET_MODE_SIZE (GET_MODE (value))
5707 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5708 value
5709 = simplify_gen_subreg (GET_MODE (value),
5710 force_reg (GET_MODE (SUBREG_REG (value)),
5711 force_operand (SUBREG_REG (value),
5712 NULL_RTX)),
5713 GET_MODE (SUBREG_REG (value)),
5714 SUBREG_BYTE (value));
5715#endif
5716
bbf6f052
RK
5717 return value;
5718}
5719\f
bbf6f052 5720/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5721 EXP can reference X, which is being modified. TOP_P is nonzero if this
5722 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5723 for EXP, as opposed to a recursive call to this function.
5724
5725 It is always safe for this routine to return zero since it merely
5726 searches for optimization opportunities. */
bbf6f052 5727
8f17b5c5 5728int
502b8322 5729safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5730{
5731 rtx exp_rtl = 0;
5732 int i, nops;
1da68f56 5733 static tree save_expr_list;
bbf6f052 5734
6676e72f
RK
5735 if (x == 0
5736 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5737 have no way of allocating temporaries of variable size
5738 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5739 So we assume here that something at a higher level has prevented a
f4510f37 5740 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5741 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5742 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5743 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5744 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5745 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5746 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5747 != INTEGER_CST)
1da68f56
RK
5748 && GET_MODE (x) == BLKmode)
5749 /* If X is in the outgoing argument area, it is always safe. */
5750 || (GET_CODE (x) == MEM
5751 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5752 || (GET_CODE (XEXP (x, 0)) == PLUS
5753 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5754 return 1;
5755
5756 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5757 find the underlying pseudo. */
5758 if (GET_CODE (x) == SUBREG)
5759 {
5760 x = SUBREG_REG (x);
5761 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5762 return 0;
5763 }
5764
1da68f56
RK
5765 /* A SAVE_EXPR might appear many times in the expression passed to the
5766 top-level safe_from_p call, and if it has a complex subexpression,
5767 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5768 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5769 with optimization took about 28 minutes to compile -- even though it was
5770 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5771 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5772 we have processed. Note that the only test of top_p was above. */
5773
5774 if (top_p)
5775 {
5776 int rtn;
5777 tree t;
5778
5779 save_expr_list = 0;
5780
5781 rtn = safe_from_p (x, exp, 0);
5782
5783 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5784 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5785
5786 return rtn;
5787 }
bbf6f052 5788
1da68f56 5789 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5790 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5791 {
5792 case 'd':
a9772b60 5793 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5794 break;
5795
5796 case 'c':
5797 return 1;
5798
5799 case 'x':
5800 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5801 {
5802 while (1)
5803 {
5804 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5805 return 0;
5806 exp = TREE_CHAIN (exp);
5807 if (!exp)
5808 return 1;
5809 if (TREE_CODE (exp) != TREE_LIST)
5810 return safe_from_p (x, exp, 0);
5811 }
5812 }
ff439b5f
CB
5813 else if (TREE_CODE (exp) == ERROR_MARK)
5814 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5815 else
5816 return 0;
5817
bbf6f052
RK
5818 case '2':
5819 case '<':
f8d4be57
CE
5820 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5821 return 0;
5d3cc252 5822 /* Fall through. */
f8d4be57
CE
5823
5824 case '1':
5825 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5826
5827 case 'e':
5828 case 'r':
5829 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5830 the expression. If it is set, we conflict iff we are that rtx or
5831 both are in memory. Otherwise, we check all operands of the
5832 expression recursively. */
5833
5834 switch (TREE_CODE (exp))
5835 {
5836 case ADDR_EXPR:
70072ed9
RK
5837 /* If the operand is static or we are static, we can't conflict.
5838 Likewise if we don't conflict with the operand at all. */
5839 if (staticp (TREE_OPERAND (exp, 0))
5840 || TREE_STATIC (exp)
5841 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5842 return 1;
5843
5844 /* Otherwise, the only way this can conflict is if we are taking
5845 the address of a DECL a that address if part of X, which is
5846 very rare. */
5847 exp = TREE_OPERAND (exp, 0);
5848 if (DECL_P (exp))
5849 {
5850 if (!DECL_RTL_SET_P (exp)
5851 || GET_CODE (DECL_RTL (exp)) != MEM)
5852 return 0;
5853 else
5854 exp_rtl = XEXP (DECL_RTL (exp), 0);
5855 }
5856 break;
bbf6f052
RK
5857
5858 case INDIRECT_REF:
1da68f56
RK
5859 if (GET_CODE (x) == MEM
5860 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5861 get_alias_set (exp)))
bbf6f052
RK
5862 return 0;
5863 break;
5864
5865 case CALL_EXPR:
f9808f81
MM
5866 /* Assume that the call will clobber all hard registers and
5867 all of memory. */
5868 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5869 || GET_CODE (x) == MEM)
5870 return 0;
bbf6f052
RK
5871 break;
5872
5873 case RTL_EXPR:
3bb5826a
RK
5874 /* If a sequence exists, we would have to scan every instruction
5875 in the sequence to see if it was safe. This is probably not
5876 worthwhile. */
5877 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5878 return 0;
5879
3bb5826a 5880 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5881 break;
5882
5883 case WITH_CLEANUP_EXPR:
6ad7895a 5884 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5885 break;
5886
5dab5552 5887 case CLEANUP_POINT_EXPR:
e5e809f4 5888 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5889
bbf6f052
RK
5890 case SAVE_EXPR:
5891 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5892 if (exp_rtl)
5893 break;
5894
1da68f56
RK
5895 /* If we've already scanned this, don't do it again. Otherwise,
5896 show we've scanned it and record for clearing the flag if we're
5897 going on. */
5898 if (TREE_PRIVATE (exp))
5899 return 1;
ff439b5f 5900
1da68f56
RK
5901 TREE_PRIVATE (exp) = 1;
5902 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5903 {
1da68f56
RK
5904 TREE_PRIVATE (exp) = 0;
5905 return 0;
ff59bfe6 5906 }
1da68f56
RK
5907
5908 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5909 return 1;
bbf6f052 5910
8129842c
RS
5911 case BIND_EXPR:
5912 /* The only operand we look at is operand 1. The rest aren't
5913 part of the expression. */
e5e809f4 5914 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5915
e9a25f70
JL
5916 default:
5917 break;
bbf6f052
RK
5918 }
5919
5920 /* If we have an rtx, we do not need to scan our operands. */
5921 if (exp_rtl)
5922 break;
5923
8f17b5c5 5924 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5925 for (i = 0; i < nops; i++)
5926 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5927 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5928 return 0;
8f17b5c5
MM
5929
5930 /* If this is a language-specific tree code, it may require
5931 special handling. */
dbbbbf3b
JDA
5932 if ((unsigned int) TREE_CODE (exp)
5933 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 5934 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 5935 return 0;
bbf6f052
RK
5936 }
5937
5938 /* If we have an rtl, find any enclosed object. Then see if we conflict
5939 with it. */
5940 if (exp_rtl)
5941 {
5942 if (GET_CODE (exp_rtl) == SUBREG)
5943 {
5944 exp_rtl = SUBREG_REG (exp_rtl);
5945 if (GET_CODE (exp_rtl) == REG
5946 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5947 return 0;
5948 }
5949
5950 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5951 are memory and they conflict. */
bbf6f052
RK
5952 return ! (rtx_equal_p (x, exp_rtl)
5953 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5954 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5955 rtx_addr_varies_p)));
bbf6f052
RK
5956 }
5957
5958 /* If we reach here, it is safe. */
5959 return 1;
5960}
5961
01c8a7c8
RK
5962/* Subroutine of expand_expr: return rtx if EXP is a
5963 variable or parameter; else return 0. */
5964
5965static rtx
502b8322 5966var_rtx (tree exp)
01c8a7c8
RK
5967{
5968 STRIP_NOPS (exp);
5969 switch (TREE_CODE (exp))
5970 {
5971 case PARM_DECL:
5972 case VAR_DECL:
5973 return DECL_RTL (exp);
5974 default:
5975 return 0;
5976 }
5977}
14a774a9 5978\f
0d4903b8
RK
5979/* Return the highest power of two that EXP is known to be a multiple of.
5980 This is used in updating alignment of MEMs in array references. */
5981
9ceca302 5982static unsigned HOST_WIDE_INT
502b8322 5983highest_pow2_factor (tree exp)
0d4903b8 5984{
9ceca302 5985 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
5986
5987 switch (TREE_CODE (exp))
5988 {
5989 case INTEGER_CST:
e0f1be5c
JJ
5990 /* We can find the lowest bit that's a one. If the low
5991 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5992 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 5993 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 5994 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 5995 later ICE. */
e0f1be5c 5996 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 5997 return BIGGEST_ALIGNMENT;
e0f1be5c 5998 else
0d4903b8 5999 {
e0f1be5c
JJ
6000 /* Note: tree_low_cst is intentionally not used here,
6001 we don't care about the upper bits. */
6002 c0 = TREE_INT_CST_LOW (exp);
6003 c0 &= -c0;
6004 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6005 }
6006 break;
6007
65a07688 6008 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6011 return MIN (c0, c1);
6012
6013 case MULT_EXPR:
6014 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6015 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6016 return c0 * c1;
6017
6018 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6019 case CEIL_DIV_EXPR:
65a07688
RK
6020 if (integer_pow2p (TREE_OPERAND (exp, 1))
6021 && host_integerp (TREE_OPERAND (exp, 1), 1))
6022 {
6023 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6024 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6025 return MAX (1, c0 / c1);
6026 }
6027 break;
0d4903b8
RK
6028
6029 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6030 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6031 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6032
65a07688
RK
6033 case COMPOUND_EXPR:
6034 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6035
0d4903b8
RK
6036 case COND_EXPR:
6037 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6038 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6039 return MIN (c0, c1);
6040
6041 default:
6042 break;
6043 }
6044
6045 return 1;
6046}
818c0c94
RH
6047
6048/* Similar, except that it is known that the expression must be a multiple
6049 of the alignment of TYPE. */
6050
9ceca302 6051static unsigned HOST_WIDE_INT
502b8322 6052highest_pow2_factor_for_type (tree type, tree exp)
818c0c94 6053{
9ceca302 6054 unsigned HOST_WIDE_INT type_align, factor;
818c0c94
RH
6055
6056 factor = highest_pow2_factor (exp);
6057 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6058 return MAX (factor, type_align);
6059}
0d4903b8 6060\f
f47e9b4e
RK
6061/* Return an object on the placeholder list that matches EXP, a
6062 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6063 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
6064 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6065 is a location which initially points to a starting location in the
738cc472
RK
6066 placeholder list (zero means start of the list) and where a pointer into
6067 the placeholder list at which the object is found is placed. */
f47e9b4e
RK
6068
6069tree
502b8322 6070find_placeholder (tree exp, tree *plist)
f47e9b4e
RK
6071{
6072 tree type = TREE_TYPE (exp);
6073 tree placeholder_expr;
6074
738cc472
RK
6075 for (placeholder_expr
6076 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6077 placeholder_expr != 0;
f47e9b4e
RK
6078 placeholder_expr = TREE_CHAIN (placeholder_expr))
6079 {
6080 tree need_type = TYPE_MAIN_VARIANT (type);
6081 tree elt;
6082
6083 /* Find the outermost reference that is of the type we want. If none,
6084 see if any object has a type that is a pointer to the type we
6085 want. */
6086 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6087 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6088 || TREE_CODE (elt) == COND_EXPR)
6089 ? TREE_OPERAND (elt, 1)
6090 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6094 ? TREE_OPERAND (elt, 0) : 0))
6095 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6096 {
6097 if (plist)
6098 *plist = placeholder_expr;
6099 return elt;
6100 }
6101
6102 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6103 elt
6104 = ((TREE_CODE (elt) == COMPOUND_EXPR
6105 || TREE_CODE (elt) == COND_EXPR)
6106 ? TREE_OPERAND (elt, 1)
6107 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6108 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6109 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6110 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6111 ? TREE_OPERAND (elt, 0) : 0))
6112 if (POINTER_TYPE_P (TREE_TYPE (elt))
6113 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6114 == need_type))
6115 {
6116 if (plist)
6117 *plist = placeholder_expr;
6118 return build1 (INDIRECT_REF, need_type, elt);
6119 }
6120 }
6121
70072ed9 6122 return 0;
f47e9b4e 6123}
eb698c58
RS
6124
6125/* Subroutine of expand_expr. Expand the two operands of a binary
6126 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6127 The value may be stored in TARGET if TARGET is nonzero. The
6128 MODIFIER argument is as documented by expand_expr. */
6129
6130static void
6131expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6132 enum expand_modifier modifier)
6133{
6134 if (! safe_from_p (target, exp1, 1))
6135 target = 0;
6136 if (operand_equal_p (exp0, exp1, 0))
6137 {
6138 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6139 *op1 = copy_rtx (*op0);
6140 }
6141 else
6142 {
c67e6e14
RS
6143 /* If we need to preserve evaluation order, copy exp0 into its own
6144 temporary variable so that it can't be clobbered by exp1. */
6145 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6146 exp0 = save_expr (exp0);
eb698c58
RS
6147 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6148 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6149 }
6150}
6151
f47e9b4e 6152\f
bbf6f052
RK
6153/* expand_expr: generate code for computing expression EXP.
6154 An rtx for the computed value is returned. The value is never null.
6155 In the case of a void EXP, const0_rtx is returned.
6156
6157 The value may be stored in TARGET if TARGET is nonzero.
6158 TARGET is just a suggestion; callers must assume that
6159 the rtx returned may not be the same as TARGET.
6160
6161 If TARGET is CONST0_RTX, it means that the value will be ignored.
6162
6163 If TMODE is not VOIDmode, it suggests generating the
6164 result in mode TMODE. But this is done only when convenient.
6165 Otherwise, TMODE is ignored and the value generated in its natural mode.
6166 TMODE is just a suggestion; callers must assume that
6167 the rtx returned may not have mode TMODE.
6168
d6a5ac33
RK
6169 Note that TARGET may have neither TMODE nor MODE. In that case, it
6170 probably will not be used.
bbf6f052
RK
6171
6172 If MODIFIER is EXPAND_SUM then when EXP is an addition
6173 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6174 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6175 products as above, or REG or MEM, or constant.
6176 Ordinarily in such cases we would output mul or add instructions
6177 and then return a pseudo reg containing the sum.
6178
6179 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6180 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6181 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6182 This is used for outputting expressions used in initializers.
6183
6184 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6185 with a constant address even if that address is not normally legitimate.
8403445a
AM
6186 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6187
6188 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6189 a call parameter. Such targets require special care as we haven't yet
6190 marked TARGET so that it's safe from being trashed by libcalls. We
6191 don't want to use TARGET for anything but the final result;
6192 Intermediate values must go elsewhere. Additionally, calls to
0fab64a3
MM
6193 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6194
6195 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6196 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6197 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6198 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6199 recursively. */
bbf6f052
RK
6200
6201rtx
0fab64a3
MM
6202expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6203 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6204{
b3694847 6205 rtx op0, op1, temp;
bbf6f052
RK
6206 tree type = TREE_TYPE (exp);
6207 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6208 enum machine_mode mode;
6209 enum tree_code code = TREE_CODE (exp);
bbf6f052 6210 optab this_optab;
68557e14
ML
6211 rtx subtarget, original_target;
6212 int ignore;
bbf6f052
RK
6213 tree context;
6214
3a94c984 6215 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6216 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6217 {
6218 op0 = CONST0_RTX (tmode);
6219 if (op0 != 0)
6220 return op0;
6221 return const0_rtx;
6222 }
6223
6224 mode = TYPE_MODE (type);
6225 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6226 subtarget = get_subtarget (target);
68557e14
ML
6227 original_target = target;
6228 ignore = (target == const0_rtx
6229 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6230 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6231 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6232 && TREE_CODE (type) == VOID_TYPE));
6233
dd27116b
RK
6234 /* If we are going to ignore this result, we need only do something
6235 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6236 is, short-circuit the most common cases here. Note that we must
6237 not call expand_expr with anything but const0_rtx in case this
6238 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6239
dd27116b
RK
6240 if (ignore)
6241 {
6242 if (! TREE_SIDE_EFFECTS (exp))
6243 return const0_rtx;
6244
14a774a9
RK
6245 /* Ensure we reference a volatile object even if value is ignored, but
6246 don't do this if all we are doing is taking its address. */
dd27116b
RK
6247 if (TREE_THIS_VOLATILE (exp)
6248 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6249 && mode != VOIDmode && mode != BLKmode
6250 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6251 {
37a08a29 6252 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
dd27116b
RK
6253 if (GET_CODE (temp) == MEM)
6254 temp = copy_to_reg (temp);
6255 return const0_rtx;
6256 }
6257
14a774a9
RK
6258 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6259 || code == INDIRECT_REF || code == BUFFER_REF)
37a08a29
RK
6260 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6261 modifier);
6262
14a774a9 6263 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6264 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6265 {
37a08a29
RK
6266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6267 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6268 return const0_rtx;
6269 }
6270 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6271 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6272 /* If the second operand has no side effects, just evaluate
0f41302f 6273 the first. */
37a08a29
RK
6274 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6275 modifier);
14a774a9
RK
6276 else if (code == BIT_FIELD_REF)
6277 {
37a08a29
RK
6278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6279 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6280 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6281 return const0_rtx;
6282 }
37a08a29 6283
90764a87 6284 target = 0;
dd27116b 6285 }
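      /* For instance, expanding (void) (x + y), where neither operand has
	 side effects, comes straight back as const0_rtx, whereas
	 (void) *p with a volatile-qualified *p still emits the load and
	 then discards the value, as handled just above.  */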
bbf6f052 6286
e44842fe
RK
6287 /* If we will do cse, generate all results into pseudo registers
6288 since 1) that allows cse to find more things
6289 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6290 cannot support. An exception is a CONSTRUCTOR into a multi-word
6291 MEM: that's much more likely to be most efficient into the MEM.
6292 Another is a CALL_EXPR which must return in memory. */
e44842fe 6293
bbf6f052 6294 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6295 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6296 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6297 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6298 target = 0;
bbf6f052 6299
bbf6f052
RK
6300 switch (code)
6301 {
6302 case LABEL_DECL:
b552441b
RS
6303 {
6304 tree function = decl_function_context (exp);
046e4e36
ZW
6305 /* Labels in containing functions, or labels used from initializers,
6306 must be forced. */
6307 if (modifier == EXPAND_INITIALIZER
6308 || (function != current_function_decl
6309 && function != inline_function_decl
6310 && function != 0))
6311 temp = force_label_rtx (exp);
ab87f8c8 6312 else
046e4e36 6313 temp = label_rtx (exp);
c5c76735 6314
046e4e36 6315 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
d0977240
RK
6316 if (function != current_function_decl
6317 && function != inline_function_decl && function != 0)
26fcb35a
RS
6318 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6319 return temp;
b552441b 6320 }
bbf6f052
RK
6321
6322 case PARM_DECL:
1877be45 6323 if (!DECL_RTL_SET_P (exp))
bbf6f052 6324 {
ddd2d57e 6325 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6326 return CONST0_RTX (mode);
bbf6f052
RK
6327 }
6328
0f41302f 6329 /* ... fall through ... */
d6a5ac33 6330
bbf6f052 6331 case VAR_DECL:
2dca20cd
RS
6332 /* If a static var's type was incomplete when the decl was written,
6333 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6334 if (DECL_SIZE (exp) == 0
6335 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6336 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6337 layout_decl (exp, 0);
921b3427 6338
0f41302f 6339 /* ... fall through ... */
d6a5ac33 6340
2dca20cd 6341 case FUNCTION_DECL:
bbf6f052
RK
6342 case RESULT_DECL:
6343 if (DECL_RTL (exp) == 0)
6344 abort ();
d6a5ac33 6345
e44842fe
RK
6346 /* Ensure variable marked as used even if it doesn't go through
6347 a parser. If it hasn't been used yet, write out an external
6348 definition. */
6349 if (! TREE_USED (exp))
6350 {
6351 assemble_external (exp);
6352 TREE_USED (exp) = 1;
6353 }
6354
dc6d66b3
RK
6355 /* Show we haven't gotten RTL for this yet. */
6356 temp = 0;
6357
bbf6f052
RK
6358 /* Handle variables inherited from containing functions. */
6359 context = decl_function_context (exp);
6360
6361 /* We treat inline_function_decl as an alias for the current function
6362 because that is the inline function whose vars, types, etc.
6363 are being merged into the current function.
6364 See expand_inline_function. */
d6a5ac33 6365
bbf6f052
RK
6366 if (context != 0 && context != current_function_decl
6367 && context != inline_function_decl
6368 /* If var is static, we don't need a static chain to access it. */
6369 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6370 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6371 {
6372 rtx addr;
6373
6374 /* Mark as non-local and addressable. */
81feeecb 6375 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6376 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6377 abort ();
dffd7eb6 6378 (*lang_hooks.mark_addressable) (exp);
bbf6f052
RK
6379 if (GET_CODE (DECL_RTL (exp)) != MEM)
6380 abort ();
6381 addr = XEXP (DECL_RTL (exp), 0);
6382 if (GET_CODE (addr) == MEM)
792760b9
RK
6383 addr
6384 = replace_equiv_address (addr,
6385 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6386 else
6387 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6388
792760b9 6389 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6390 }
4af3895e 6391
bbf6f052
RK
6392 /* This is the case of an array whose size is to be determined
6393 from its initializer, while the initializer is still being parsed.
6394 See expand_decl. */
d6a5ac33 6395
dc6d66b3
RK
6396 else if (GET_CODE (DECL_RTL (exp)) == MEM
6397 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6398 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6399
6400 /* If DECL_RTL is memory, we are in the normal case; if either
6401 the address is not valid, or it is not a register and -fforce-addr
6402 is specified, get the address into a register. */
6403
dc6d66b3
RK
6404 else if (GET_CODE (DECL_RTL (exp)) == MEM
6405 && modifier != EXPAND_CONST_ADDRESS
6406 && modifier != EXPAND_SUM
6407 && modifier != EXPAND_INITIALIZER
6408 && (! memory_address_p (DECL_MODE (exp),
6409 XEXP (DECL_RTL (exp), 0))
6410 || (flag_force_addr
6411 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
0fab64a3
MM
6412 {
6413 if (alt_rtl)
6414 *alt_rtl = DECL_RTL (exp);
6415 temp = replace_equiv_address (DECL_RTL (exp),
6416 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6417 }
1499e0a8 6418
dc6d66b3 6419 /* If we got something, return it. But first, set the alignment
04956a1a 6420 if the address is a register. */
dc6d66b3
RK
6421 if (temp != 0)
6422 {
6423 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6424 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6425
6426 return temp;
6427 }
6428
1499e0a8
RK
6429 /* If the mode of DECL_RTL does not match that of the decl, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6432
6433 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6434 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6435 {
1499e0a8
RK
6436 /* Get the signedness used for this variable. Ensure we get the
6437 same mode we got when the variable was declared. */
78911e8b 6438 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6439 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6440 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6441 abort ();
6442
ddef6bc7 6443 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6444 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6445 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6446 return temp;
6447 }
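	/* For example (hypothetical register number, illustrating the code
	   above): on a target whose PROMOTE_MODE widens QImode variables
	   to SImode, a QImode VAR_DECL whose DECL_RTL is (reg:SI 58) is
	   returned here as a QImode lowpart SUBREG of that register, with
	   SUBREG_PROMOTED_VAR_P set so later users know the value is
	   already extended in the wider register.  */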
6448
bbf6f052
RK
6449 return DECL_RTL (exp);
6450
6451 case INTEGER_CST:
d8a50944 6452 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6453 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6454
d8a50944
RH
6455 /* ??? If overflow is set, fold will have done an incomplete job,
6456 which can result in (plus xx (const_int 0)), which can get
6457 simplified by validate_replace_rtx during virtual register
6458 instantiation, which can result in unrecognizable insns.
6459 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6460 if (TREE_CONSTANT_OVERFLOW (exp)
6461 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6462 temp = force_reg (mode, temp);
6463
6464 return temp;
6465
d744e06e
AH
6466 case VECTOR_CST:
6467 return const_vector_from_tree (exp);
6468
bbf6f052 6469 case CONST_DECL:
8403445a 6470 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6471
6472 case REAL_CST:
6473 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6474 which will be turned into memory by reload if necessary.
6475
bbf6f052
RK
6476 We used to force a register so that loop.c could see it. But
6477 this does not allow gen_* patterns to perform optimizations with
6478 the constants. It also produces two insns in cases like "x = 1.0;".
6479 On most machines, floating-point constants are not permitted in
6480 many insns, so we'd end up copying it to a register in any case.
6481
6482 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6483 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6484 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6485
6486 case COMPLEX_CST:
9ad58e09
RS
6487 /* Handle evaluating a complex constant in a CONCAT target. */
6488 if (original_target && GET_CODE (original_target) == CONCAT)
6489 {
6490 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6491 rtx rtarg, itarg;
6492
6493 rtarg = XEXP (original_target, 0);
6494 itarg = XEXP (original_target, 1);
6495
6496 /* Move the real and imaginary parts separately. */
6497 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6498 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6499
6500 if (op0 != rtarg)
6501 emit_move_insn (rtarg, op0);
6502 if (op1 != itarg)
6503 emit_move_insn (itarg, op1);
6504
6505 return original_target;
6506 }
6507
71c0e7fc 6508 /* ... fall through ... */
9ad58e09 6509
bbf6f052 6510 case STRING_CST:
afc6aaab 6511 temp = output_constant_def (exp, 1);
bbf6f052 6512
afc6aaab 6513 /* temp contains a constant address.
bbf6f052
RK
6514 On RISC machines where a constant address isn't valid,
6515 make some insns to get that address into a register. */
afc6aaab 6516 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6517 && modifier != EXPAND_INITIALIZER
6518 && modifier != EXPAND_SUM
afc6aaab
ZW
6519 && (! memory_address_p (mode, XEXP (temp, 0))
6520 || flag_force_addr))
6521 return replace_equiv_address (temp,
6522 copy_rtx (XEXP (temp, 0)));
6523 return temp;
bbf6f052 6524
bf1e5319 6525 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6526 {
6527 rtx to_return;
72954a4f
JM
6528 struct file_stack fs;
6529
6530 fs.location = input_location;
6531 fs.next = expr_wfl_stack;
b24f65cd 6532 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 6533 input_line = EXPR_WFL_LINENO (exp);
72954a4f 6534 expr_wfl_stack = &fs;
b24f65cd 6535 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
0cea056b 6536 emit_line_note (input_location);
6ad7895a 6537 /* Possibly avoid switching back and forth here. */
72954a4f
JM
6538 to_return = expand_expr (EXPR_WFL_NODE (exp),
6539 (ignore ? const0_rtx : target),
6540 tmode, modifier);
6541 if (expr_wfl_stack != &fs)
6542 abort ();
6543 input_location = fs.location;
6544 expr_wfl_stack = fs.next;
b24f65cd
APB
6545 return to_return;
6546 }
bf1e5319 6547
bbf6f052
RK
6548 case SAVE_EXPR:
6549 context = decl_function_context (exp);
d6a5ac33 6550
d0977240
RK
6551 /* If this SAVE_EXPR was at global context, assume we are an
6552 initialization function and move it into our context. */
6553 if (context == 0)
6554 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6555
bbf6f052
RK
6556 /* We treat inline_function_decl as an alias for the current function
6557 because that is the inline function whose vars, types, etc.
6558 are being merged into the current function.
6559 See expand_inline_function. */
6560 if (context == current_function_decl || context == inline_function_decl)
6561 context = 0;
6562
6563 /* If this is non-local, handle it. */
6564 if (context)
6565 {
d0977240
RK
6566 /* The following call just exists to abort if the context is
6567 not of a containing function. */
6568 find_function_data (context);
6569
bbf6f052
RK
6570 temp = SAVE_EXPR_RTL (exp);
6571 if (temp && GET_CODE (temp) == REG)
6572 {
f29a2bd1 6573 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6574 temp = SAVE_EXPR_RTL (exp);
6575 }
6576 if (temp == 0 || GET_CODE (temp) != MEM)
6577 abort ();
792760b9
RK
6578 return
6579 replace_equiv_address (temp,
6580 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6581 }
6582 if (SAVE_EXPR_RTL (exp) == 0)
6583 {
06089a8b
RK
6584 if (mode == VOIDmode)
6585 temp = const0_rtx;
6586 else
1da68f56
RK
6587 temp = assign_temp (build_qualified_type (type,
6588 (TYPE_QUALS (type)
6589 | TYPE_QUAL_CONST)),
6590 3, 0, 0);
1499e0a8 6591
bbf6f052 6592 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6593 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6594 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6595 save_expr_regs);
ff78f773
RK
6596
6597 /* If the mode of TEMP does not match that of the expression, it
6598 must be a promoted value. We pass store_expr a SUBREG of the
6599 wanted mode but mark it so that we know that it was already
3ac1a319 6600 extended. */
ff78f773
RK
6601
6602 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6603 {
ddef6bc7 6604 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6605 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6606 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6607 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6608 }
6609
4c7a0be9 6610 if (temp == const0_rtx)
37a08a29 6611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6612 else
8403445a
AM
6613 store_expr (TREE_OPERAND (exp, 0), temp,
6614 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6615
6616 TREE_USED (exp) = 1;
bbf6f052 6617 }
1499e0a8
RK
6618
6619 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6620 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6621 but mark it so that we know that it was already extended. */
1499e0a8
RK
6622
6623 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6624 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6625 {
e70d22c8
RK
6626 /* Compute the signedness and make the proper SUBREG. */
6627 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6628 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6629 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6630 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6631 return temp;
6632 }
6633
bbf6f052
RK
6634 return SAVE_EXPR_RTL (exp);
6635
679163cf
MS
6636 case UNSAVE_EXPR:
6637 {
6638 rtx temp;
6639 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a
NB
6640 TREE_OPERAND (exp, 0)
6641 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
679163cf
MS
6642 return temp;
6643 }
6644
b50d17a1 6645 case PLACEHOLDER_EXPR:
e9a25f70 6646 {
f47e9b4e 6647 tree old_list = placeholder_list;
738cc472 6648 tree placeholder_expr = 0;
e9a25f70 6649
f47e9b4e 6650 exp = find_placeholder (exp, &placeholder_expr);
70072ed9
RK
6651 if (exp == 0)
6652 abort ();
6653
f47e9b4e 6654 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 6655 temp = expand_expr (exp, original_target, tmode, modifier);
f47e9b4e
RK
6656 placeholder_list = old_list;
6657 return temp;
e9a25f70 6658 }
b50d17a1 6659
b50d17a1
RK
6660 case WITH_RECORD_EXPR:
6661 /* Put the object on the placeholder list, expand our first operand,
6662 and pop the list. */
6663 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6664 placeholder_list);
37a08a29
RK
6665 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6666 modifier);
b50d17a1
RK
6667 placeholder_list = TREE_CHAIN (placeholder_list);
6668 return target;
6669
70e6ca43
APB
6670 case GOTO_EXPR:
6671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6672 expand_goto (TREE_OPERAND (exp, 0));
6673 else
6674 expand_computed_goto (TREE_OPERAND (exp, 0));
6675 return const0_rtx;
6676
bbf6f052 6677 case EXIT_EXPR:
df4ae160 6678 expand_exit_loop_if_false (NULL,
e44842fe 6679 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6680 return const0_rtx;
6681
f42e28dd
APB
6682 case LABELED_BLOCK_EXPR:
6683 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6684 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6685 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6686 do_pending_stack_adjust ();
f42e28dd
APB
6687 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6688 return const0_rtx;
6689
6690 case EXIT_BLOCK_EXPR:
6691 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6692 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6693 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6694 return const0_rtx;
6695
bbf6f052 6696 case LOOP_EXPR:
0088fcb1 6697 push_temp_slots ();
bbf6f052 6698 expand_start_loop (1);
b0832fe1 6699 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6700 expand_end_loop ();
0088fcb1 6701 pop_temp_slots ();
bbf6f052
RK
6702
6703 return const0_rtx;
6704
6705 case BIND_EXPR:
6706 {
6707 tree vars = TREE_OPERAND (exp, 0);
bbf6f052
RK
6708
6709 /* Need to open a binding contour here because
e976b8b2 6710 if there are any cleanups they must be contained here. */
8e91754e 6711 expand_start_bindings (2);
bbf6f052 6712
2df53c0b
RS
6713 /* Mark the corresponding BLOCK for output in its proper place. */
6714 if (TREE_OPERAND (exp, 2) != 0
6715 && ! TREE_USED (TREE_OPERAND (exp, 2)))
43577e6b 6716 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
bbf6f052
RK
6717
6718 /* If VARS have not yet been expanded, expand them now. */
6719 while (vars)
6720 {
19e7881c 6721 if (!DECL_RTL_SET_P (vars))
4977bab6 6722 expand_decl (vars);
bbf6f052
RK
6723 expand_decl_init (vars);
6724 vars = TREE_CHAIN (vars);
6725 }
6726
37a08a29 6727 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
6728
6729 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6730
6731 return temp;
6732 }
6733
6734 case RTL_EXPR:
83b853c9
JM
6735 if (RTL_EXPR_SEQUENCE (exp))
6736 {
6737 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6738 abort ();
2f937369 6739 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
6740 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6741 }
64dc53f3
MM
6742 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6743 free_temps_for_rtl_expr (exp);
0fab64a3
MM
6744 if (alt_rtl)
6745 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
bbf6f052
RK
6746 return RTL_EXPR_RTL (exp);
6747
6748 case CONSTRUCTOR:
dd27116b
RK
6749 /* If we don't need the result, just ensure we evaluate any
6750 subexpressions. */
6751 if (ignore)
6752 {
6753 tree elt;
37a08a29 6754
dd27116b 6755 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6756 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6757
dd27116b
RK
6758 return const0_rtx;
6759 }
3207b172 6760
4af3895e
JVA
6761 /* All elts simple constants => refer to a constant in memory. But
6762 if this is a non-BLKmode mode, let it store a field at a time
6763 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6764 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6765 store directly into the target unless the type is large enough
6766 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6767 all operands are constant, put it in memory as well.
6768
6769 FIXME: Avoid trying to fill vector constructors piecemeal.
6770 Output them with output_constant_def below unless we're sure
6771 they're zeros. This should go away when vector initializers
6772 are treated like VECTOR_CST instead of arrays.
6773 */
dd27116b 6774 else if ((TREE_STATIC (exp)
3207b172 6775 && ((mode == BLKmode
e5e809f4 6776 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6777 || TREE_ADDRESSABLE (exp)
19caa751 6778 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6779 && (! MOVE_BY_PIECES_P
19caa751
RK
6780 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6781 TYPE_ALIGN (type)))
0fb7aeda
KH
6782 && ((TREE_CODE (type) == VECTOR_TYPE
6783 && !is_zeros_p (exp))
6784 || ! mostly_zeros_p (exp)))))
f59700f9
RK
6785 || ((modifier == EXPAND_INITIALIZER
6786 || modifier == EXPAND_CONST_ADDRESS)
6787 && TREE_CONSTANT (exp)))
bbf6f052 6788 {
bd7cf17e 6789 rtx constructor = output_constant_def (exp, 1);
19caa751 6790
b552441b
RS
6791 if (modifier != EXPAND_CONST_ADDRESS
6792 && modifier != EXPAND_INITIALIZER
792760b9
RK
6793 && modifier != EXPAND_SUM)
6794 constructor = validize_mem (constructor);
6795
bbf6f052
RK
6796 return constructor;
6797 }
bbf6f052
RK
6798 else
6799 {
e9ac02a6
JW
6800 /* Handle calls that pass values in multiple non-contiguous
6801 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6802 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6803 || GET_CODE (target) == PARALLEL
6804 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6805 target
6806 = assign_temp (build_qualified_type (type,
6807 (TYPE_QUALS (type)
6808 | (TREE_READONLY (exp)
6809 * TYPE_QUAL_CONST))),
c24ae149 6810 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6811
de8920be 6812 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6813 return target;
6814 }
6815
6816 case INDIRECT_REF:
6817 {
6818 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6819 tree index;
3a94c984
KH
6820 tree string = string_constant (exp1, &index);
6821
06eaa86f 6822 /* Try to optimize reads from const strings. */
0fb7aeda
KH
6823 if (string
6824 && TREE_CODE (string) == STRING_CST
6825 && TREE_CODE (index) == INTEGER_CST
05bccae2 6826 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
0fb7aeda
KH
6827 && GET_MODE_CLASS (mode) == MODE_INT
6828 && GET_MODE_SIZE (mode) == 1
37a08a29 6829 && modifier != EXPAND_WRITE)
0fb7aeda 6830 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 6831 [TREE_INT_CST_LOW (index)], mode);
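	/* On an ASCII host, for example, the read "abc"[1] is handled by
	   the code just above and folds directly to (const_int 98), with
	   no memory reference emitted.  */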
bbf6f052 6832
405f0da6
JW
6833 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6834 op0 = memory_address (mode, op0);
38a448ca 6835 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6836 set_mem_attributes (temp, exp, 0);
1125706f 6837
14a774a9
RK
6838 /* If we are writing to this object and its type is a record with
6839 readonly fields, we must mark it as readonly so it will
6840 conflict with readonly references to those fields. */
37a08a29 6841 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6842 RTX_UNCHANGING_P (temp) = 1;
6843
8c8a8e34
JW
6844 return temp;
6845 }
bbf6f052
RK
6846
6847 case ARRAY_REF:
742920c7
RK
6848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6849 abort ();
bbf6f052 6850
bbf6f052 6851 {
742920c7
RK
6852 tree array = TREE_OPERAND (exp, 0);
6853 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6854 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6855 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6856 HOST_WIDE_INT i;
b50d17a1 6857
d4c89139
PB
6858 /* Optimize the special-case of a zero lower bound.
6859
6860 We convert the low_bound to sizetype to avoid some problems
6861 with constant folding. (E.g. suppose the lower bound is 1,
6862 and its mode is QI. Without the conversion, (ARRAY
6863 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6864 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6865
742920c7 6866 if (! integer_zerop (low_bound))
fed3cef0 6867 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6868
742920c7 6869 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6870 This is not done in fold so it won't happen inside &.
6871 Don't fold if this is for wide characters since it's too
6872 difficult to do correctly and this is a very rare case. */
742920c7 6873
017e1b43
RH
6874 if (modifier != EXPAND_CONST_ADDRESS
6875 && modifier != EXPAND_INITIALIZER
6876 && modifier != EXPAND_MEMORY
cb5fa0f8 6877 && TREE_CODE (array) == STRING_CST
742920c7 6878 && TREE_CODE (index) == INTEGER_CST
05bccae2 6879 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6880 && GET_MODE_CLASS (mode) == MODE_INT
6881 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6882 return gen_int_mode (TREE_STRING_POINTER (array)
6883 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6884
742920c7
RK
6885 /* If this is a constant index into a constant array,
6886 just get the value from the array. Handle both the cases when
6887 we have an explicit constructor and when our operand is a variable
6888 that was declared const. */
4af3895e 6889
017e1b43
RH
6890 if (modifier != EXPAND_CONST_ADDRESS
6891 && modifier != EXPAND_INITIALIZER
6892 && modifier != EXPAND_MEMORY
6893 && TREE_CODE (array) == CONSTRUCTOR
6894 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6895 && TREE_CODE (index) == INTEGER_CST
3a94c984 6896 && 0 > compare_tree_int (index,
05bccae2
RK
6897 list_length (CONSTRUCTOR_ELTS
6898 (TREE_OPERAND (exp, 0)))))
742920c7 6899 {
05bccae2
RK
6900 tree elem;
6901
6902 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6903 i = TREE_INT_CST_LOW (index);
6904 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6905 ;
6906
6907 if (elem)
37a08a29
RK
6908 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6909 modifier);
742920c7 6910 }
3a94c984 6911
742920c7 6912 else if (optimize >= 1
cb5fa0f8
RK
6913 && modifier != EXPAND_CONST_ADDRESS
6914 && modifier != EXPAND_INITIALIZER
017e1b43 6915 && modifier != EXPAND_MEMORY
742920c7
RK
6916 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6917 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
6918 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6919 && targetm.binds_local_p (array))
742920c7 6920 {
08293add 6921 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6922 {
6923 tree init = DECL_INITIAL (array);
6924
742920c7
RK
6925 if (TREE_CODE (init) == CONSTRUCTOR)
6926 {
665f2503 6927 tree elem;
742920c7 6928
05bccae2 6929 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6930 (elem
6931 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6932 elem = TREE_CHAIN (elem))
6933 ;
6934
c54b0a5e 6935 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6936 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6937 tmode, modifier);
742920c7
RK
6938 }
6939 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6940 && 0 > compare_tree_int (index,
6941 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6942 {
6943 tree type = TREE_TYPE (TREE_TYPE (init));
6944 enum machine_mode mode = TYPE_MODE (type);
6945
6946 if (GET_MODE_CLASS (mode) == MODE_INT
6947 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6948 return gen_int_mode (TREE_STRING_POINTER (init)
6949 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 6950 }
742920c7
RK
6951 }
6952 }
6953 }
afc6aaab 6954 goto normal_inner_ref;
bbf6f052
RK
6955
6956 case COMPONENT_REF:
4af3895e 6957 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
6958 appropriate field if it is present. */
6959 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
6960 {
6961 tree elt;
6962
6963 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6964 elt = TREE_CHAIN (elt))
86b5812c
RK
6965 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6966 /* We can normally use the value of the field in the
6967 CONSTRUCTOR. However, if this is a bitfield in
6968 an integral mode that we can fit in a HOST_WIDE_INT,
6969 we must mask only the number of bits in the bitfield,
6970 since this is done implicitly by the constructor. If
6971 the bitfield does not meet either of those conditions,
6972 we can't do this optimization. */
6973 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6974 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6975 == MODE_INT)
6976 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6977 <= HOST_BITS_PER_WIDE_INT))))
6978 {
8403445a
AM
6979 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6980 && modifier == EXPAND_STACK_PARM)
6981 target = 0;
3a94c984 6982 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6983 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6984 {
9df2c88c
RK
6985 HOST_WIDE_INT bitsize
6986 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
6987 enum machine_mode imode
6988 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c
RK
6989
6990 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6991 {
6992 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 6993 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
6994 }
6995 else
6996 {
6997 tree count
e5e809f4
JL
6998 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6999 0);
86b5812c
RK
7000
7001 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7002 target, 0);
7003 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7004 target, 0);
7005 }
7006 }
7007
7008 return op0;
7009 }
4af3895e 7010 }
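      /* A worked instance of the masking above (field sizes chosen for
	 illustration): a 3-bit unsigned bitfield held in SImode is masked
	 with ((HOST_WIDE_INT) 1 << 3) - 1, i.e. 7, while a 3-bit signed
	 bitfield is shifted left and then arithmetically right by
	 GET_MODE_BITSIZE (SImode) - 3 bits so that the sign bit is
	 propagated.  */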
afc6aaab 7011 goto normal_inner_ref;
4af3895e 7012
afc6aaab
ZW
7013 case BIT_FIELD_REF:
7014 case ARRAY_RANGE_REF:
7015 normal_inner_ref:
bbf6f052
RK
7016 {
7017 enum machine_mode mode1;
770ae6cc 7018 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7019 tree offset;
bbf6f052 7020 int volatilep = 0;
839c4796 7021 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7022 &mode1, &unsignedp, &volatilep);
f47e9b4e 7023 rtx orig_op0;
bbf6f052 7024
e7f3c83f
RK
7025 /* If we got back the original object, something is wrong. Perhaps
7026 we are evaluating an expression too early. In any event, don't
7027 infinitely recurse. */
7028 if (tem == exp)
7029 abort ();
7030
3d27140a 7031 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7032 computation, since it will need a temporary and TARGET is known
7033 to be adequate for that purpose. This occurs in unchecked conversion in Ada. */
3a94c984 7034
f47e9b4e
RK
7035 orig_op0 = op0
7036 = expand_expr (tem,
7037 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7038 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7039 != INTEGER_CST)
8403445a 7040 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7041 ? target : NULL_RTX),
7042 VOIDmode,
7043 (modifier == EXPAND_INITIALIZER
8403445a
AM
7044 || modifier == EXPAND_CONST_ADDRESS
7045 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7046 ? modifier : EXPAND_NORMAL);
bbf6f052 7047
8c8a8e34 7048 /* If this is a constant, put it into a register if it is a
14a774a9 7049 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8c8a8e34
JW
7050 if (CONSTANT_P (op0))
7051 {
7052 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7053 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7054 && offset == 0)
8c8a8e34
JW
7055 op0 = force_reg (mode, op0);
7056 else
7057 op0 = validize_mem (force_const_mem (mode, op0));
7058 }
7059
8d2e5f72
RK
7060 /* Otherwise, if this object is not in memory and we either have an
7061 offset or a BLKmode result, put it there. This case can't occur in
7062 C, but can in Ada if we have unchecked conversion of an expression
7063 from a scalar type to an array or record type or for an
7064 ARRAY_RANGE_REF whose type is BLKmode. */
7065 else if (GET_CODE (op0) != MEM
7066 && (offset != 0
7067 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7068 {
7069 /* If the operand is a SAVE_EXPR, we can deal with this by
7070 forcing the SAVE_EXPR into memory. */
7071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7072 {
7073 put_var_into_stack (TREE_OPERAND (exp, 0),
7074 /*rescan=*/true);
7075 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7076 }
7077 else
7078 {
7079 tree nt
7080 = build_qualified_type (TREE_TYPE (tem),
7081 (TYPE_QUALS (TREE_TYPE (tem))
7082 | TYPE_QUAL_CONST));
7083 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7084
8d2e5f72
RK
7085 emit_move_insn (memloc, op0);
7086 op0 = memloc;
7087 }
7088 }
7089
7bb0943f
RS
7090 if (offset != 0)
7091 {
8403445a
AM
7092 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7093 EXPAND_SUM);
7bb0943f
RS
7094
7095 if (GET_CODE (op0) != MEM)
7096 abort ();
2d48c13d 7097
2d48c13d 7098#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7099 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7100 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7101#else
7102 if (GET_MODE (offset_rtx) != ptr_mode)
7103 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7104#endif
7105
e82407b5
EB
7106 if (GET_MODE (op0) == BLKmode
7107 /* A constant address in OP0 can have VOIDmode, we must
7108 not try to call force_reg in that case. */
efd07ca7 7109 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7110 && bitsize != 0
3a94c984 7111 && (bitpos % bitsize) == 0
89752202 7112 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7113 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7114 {
e3c8ea67 7115 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7116 bitpos = 0;
7117 }
7118
0d4903b8
RK
7119 op0 = offset_address (op0, offset_rtx,
7120 highest_pow2_factor (offset));
7bb0943f
RS
7121 }
7122
1ce7f3c2
RK
7123 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7124 record its alignment as BIGGEST_ALIGNMENT. */
7125 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7126 && is_aligning_offset (offset, tem))
7127 set_mem_align (op0, BIGGEST_ALIGNMENT);
7128
bbf6f052
RK
7129 /* Don't forget about volatility even if this is a bitfield. */
7130 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7131 {
f47e9b4e
RK
7132 if (op0 == orig_op0)
7133 op0 = copy_rtx (op0);
7134
bbf6f052
RK
7135 MEM_VOLATILE_P (op0) = 1;
7136 }
7137
010f87c4
JJ
7138 /* The following code doesn't handle CONCAT.
7139 Assume only bitpos == 0 can be used for CONCAT, due to
7140 one-element arrays having the same mode as their element. */
7141 if (GET_CODE (op0) == CONCAT)
7142 {
7143 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7144 abort ();
7145 return op0;
7146 }
7147
ccc98036
RS
7148 /* In cases where an aligned union has an unaligned object
7149 as a field, we might be extracting a BLKmode value from
7150 an integer-mode (e.g., SImode) object. Handle this case
7151 by doing the extract into an object as wide as the field
7152 (which we know to be the width of a basic mode), then
cb5fa0f8 7153 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7154 if (mode1 == VOIDmode
ccc98036 7155 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7156 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7157 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7158 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7159 && modifier != EXPAND_CONST_ADDRESS
7160 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7161 /* If the field isn't aligned enough to fetch as a memref,
7162 fetch it as a bit field. */
7163 || (mode1 != BLKmode
9e5f281f 7164 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5
EB
7165 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7166 || (GET_CODE (op0) == MEM
7167 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7168 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7169 && ((modifier == EXPAND_CONST_ADDRESS
7170 || modifier == EXPAND_INITIALIZER)
7171 ? STRICT_ALIGNMENT
7172 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7173 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7174 /* If the type and the field are a constant size and the
7175 size of the type isn't the same size as the bitfield,
7176 we must use bitfield operations. */
7177 || (bitsize >= 0
7178 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7179 == INTEGER_CST)
7180 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7181 bitsize)))
bbf6f052 7182 {
bbf6f052
RK
7183 enum machine_mode ext_mode = mode;
7184
14a774a9
RK
7185 if (ext_mode == BLKmode
7186 && ! (target != 0 && GET_CODE (op0) == MEM
7187 && GET_CODE (target) == MEM
7188 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7189 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7190
7191 if (ext_mode == BLKmode)
a281e72d 7192 {
7a06d606
RK
7193 if (target == 0)
7194 target = assign_temp (type, 0, 1, 1);
7195
7196 if (bitsize == 0)
7197 return target;
7198
a281e72d
RK
7199 /* In this case, BITPOS must start at a byte boundary and
7200 TARGET, if specified, must be a MEM. */
7201 if (GET_CODE (op0) != MEM
7202 || (target != 0 && GET_CODE (target) != MEM)
7203 || bitpos % BITS_PER_UNIT != 0)
7204 abort ();
7205
7a06d606
RK
7206 emit_block_move (target,
7207 adjust_address (op0, VOIDmode,
7208 bitpos / BITS_PER_UNIT),
a06ef755 7209 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7210 / BITS_PER_UNIT),
8403445a
AM
7211 (modifier == EXPAND_STACK_PARM
7212 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7213
a281e72d
RK
7214 return target;
7215 }
bbf6f052 7216
dc6d66b3
RK
7217 op0 = validize_mem (op0);
7218
7219 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7220 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7221
8403445a
AM
7222 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7223 (modifier == EXPAND_STACK_PARM
7224 ? NULL_RTX : target),
7225 ext_mode, ext_mode,
bbf6f052 7226 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7227
7228 /* If the result is a record type and BITSIZE is narrower than
7229 the mode of OP0, an integral mode, and this is a big endian
7230 machine, we must put the field into the high-order bits. */
7231 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7232 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7233 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7234 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7235 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7236 - bitsize),
7237 op0, 1);
7238
bbf6f052
RK
7239 if (mode == BLKmode)
7240 {
c3d32120 7241 rtx new = assign_temp (build_qualified_type
b0c48229
NB
7242 ((*lang_hooks.types.type_for_mode)
7243 (ext_mode, 0),
c3d32120 7244 TYPE_QUAL_CONST), 0, 1, 1);
bbf6f052
RK
7245
7246 emit_move_insn (new, op0);
7247 op0 = copy_rtx (new);
7248 PUT_MODE (op0, BLKmode);
c3d32120 7249 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7250 }
7251
7252 return op0;
7253 }
7254
05019f83
RK
7255 /* If the result is BLKmode, use that to access the object
7256 now as well. */
7257 if (mode == BLKmode)
7258 mode1 = BLKmode;
7259
bbf6f052
RK
7260 /* Get a reference to just this component. */
7261 if (modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7263 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7264 else
f4ef873c 7265 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7266
f47e9b4e
RK
7267 if (op0 == orig_op0)
7268 op0 = copy_rtx (op0);
7269
3bdf5ad1 7270 set_mem_attributes (op0, exp, 0);
dc6d66b3 7271 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7272 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7273
bbf6f052 7274 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7275 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7276 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7277 || modifier == EXPAND_INITIALIZER)
bbf6f052 7278 return op0;
0d15e60c 7279 else if (target == 0)
bbf6f052 7280 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7281
bbf6f052
RK
7282 convert_move (target, op0, unsignedp);
7283 return target;
7284 }
7285
4a8d0c9c
RH
7286 case VTABLE_REF:
7287 {
7288 rtx insn, before = get_last_insn (), vtbl_ref;
7289
7290 /* Evaluate the interior expression. */
7291 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7292 tmode, modifier);
7293
7294 /* Get or create an instruction off which to hang a note. */
7295 if (REG_P (subtarget))
7296 {
7297 target = subtarget;
7298 insn = get_last_insn ();
7299 if (insn == before)
7300 abort ();
7301 if (! INSN_P (insn))
7302 insn = prev_nonnote_insn (insn);
7303 }
7304 else
7305 {
7306 target = gen_reg_rtx (GET_MODE (subtarget));
7307 insn = emit_move_insn (target, subtarget);
7308 }
7309
7310 /* Collect the data for the note. */
7311 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7312 vtbl_ref = plus_constant (vtbl_ref,
7313 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7314 /* Discard the initial CONST that was added. */
7315 vtbl_ref = XEXP (vtbl_ref, 0);
7316
7317 REG_NOTES (insn)
7318 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7319
7320 return target;
7321 }
7322
bbf6f052
RK
7323 /* Intended for a reference to a buffer of a file-object in Pascal.
7324 But it's not certain that a special tree code will really be
7325 necessary for these. INDIRECT_REF might work for them. */
7326 case BUFFER_REF:
7327 abort ();
7328
7308a047 7329 case IN_EXPR:
7308a047 7330 {
d6a5ac33
RK
7331 /* Pascal set IN expression.
7332
7333 Algorithm:
7334 rlo = set_low - (set_low%bits_per_word);
7335 the_word = set [ (index - rlo)/bits_per_word ];
7336 bit_index = index % bits_per_word;
7337 bitmask = 1 << bit_index;
7338 return !!(the_word & bitmask); */
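	/* A worked instance of the pseudo-code above (numbers chosen for
	   illustration): with 32-bit words, set_low == 5 and index == 70,
	   rlo = 5 - (5 % 32) = 0, the_word = set[(70 - 0) / 32] = set[2],
	   bit_index = 70 % 32 = 6 and bitmask = 1 << 6 = 64.  The code
	   below performs the analogous computation on BITS_PER_UNIT-sized
	   chunks of the set.  */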
7339
7308a047
RS
7340 tree set = TREE_OPERAND (exp, 0);
7341 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7342 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7343 tree set_type = TREE_TYPE (set);
7308a047
RS
7344 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7345 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7346 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7347 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7348 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7349 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7350 rtx setaddr = XEXP (setval, 0);
7351 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7352 rtx rlow;
7353 rtx diff, quo, rem, addr, bit, result;
7308a047 7354
d6a5ac33
RK
7355 /* If domain is empty, answer is no. Likewise if index is constant
7356 and out of bounds. */
51723711 7357 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7358 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7359 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7360 || (TREE_CODE (index) == INTEGER_CST
7361 && TREE_CODE (set_low_bound) == INTEGER_CST
7362 && tree_int_cst_lt (index, set_low_bound))
7363 || (TREE_CODE (set_high_bound) == INTEGER_CST
7364 && TREE_CODE (index) == INTEGER_CST
7365 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7366 return const0_rtx;
7367
d6a5ac33
RK
7368 if (target == 0)
7369 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7370
7371 /* If we get here, we have to generate the code for both cases
7372 (in range and out of range). */
7373
7374 op0 = gen_label_rtx ();
7375 op1 = gen_label_rtx ();
7376
7377 if (! (GET_CODE (index_val) == CONST_INT
7378 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7379 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7380 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7381
7382 if (! (GET_CODE (index_val) == CONST_INT
7383 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7384 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7385 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7386
7387 /* Calculate the element number of bit zero in the first word
7388 of the set. */
7389 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7390 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7391 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7392 else
17938e57
RK
7393 rlow = expand_binop (index_mode, and_optab, lo_r,
7394 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7395 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7396
d6a5ac33
RK
7397 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7398 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7399
7400 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7401 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7402 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7403 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7404
7308a047 7405 addr = memory_address (byte_mode,
d6a5ac33
RK
7406 expand_binop (index_mode, add_optab, diff,
7407 setaddr, NULL_RTX, iunsignedp,
17938e57 7408 OPTAB_LIB_WIDEN));
d6a5ac33 7409
3a94c984 7410 /* Extract the bit we want to examine. */
7308a047 7411 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7412 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7413 make_tree (TREE_TYPE (index), rem),
7414 NULL_RTX, 1);
7415 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7416 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7417 1, OPTAB_LIB_WIDEN);
17938e57
RK
7418
7419 if (result != target)
7420 convert_move (target, result, 1);
7308a047
RS
7421
7422 /* Output the code to handle the out-of-range case. */
7423 emit_jump (op0);
7424 emit_label (op1);
7425 emit_move_insn (target, const0_rtx);
7426 emit_label (op0);
7427 return target;
7428 }
7429
bbf6f052 7430 case WITH_CLEANUP_EXPR:
6ad7895a 7431 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7432 {
6ad7895a 7433 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7434 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7435 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7436 CLEANUP_EH_ONLY (exp));
e976b8b2 7437
bbf6f052 7438 /* That's it for this cleanup. */
6ad7895a 7439 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7440 }
6ad7895a 7441 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7442
5dab5552
MS
7443 case CLEANUP_POINT_EXPR:
7444 {
e976b8b2
MS
7445 /* Start a new binding layer that will keep track of all cleanup
7446 actions to be performed. */
8e91754e 7447 expand_start_bindings (2);
e976b8b2 7448
d93d4205 7449 target_temp_slot_level = temp_slot_level;
e976b8b2 7450
37a08a29 7451 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7452 /* If we're going to use this value, load it up now. */
7453 if (! ignore)
7454 op0 = force_not_mem (op0);
d93d4205 7455 preserve_temp_slots (op0);
e976b8b2 7456 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7457 }
7458 return op0;
7459
bbf6f052
RK
7460 case CALL_EXPR:
7461 /* Check for a built-in function. */
7462 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7463 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7464 == FUNCTION_DECL)
bbf6f052 7465 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7466 {
c70eaeaf
KG
7467 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7468 == BUILT_IN_FRONTEND)
8403445a 7469 return (*lang_hooks.expand_expr) (exp, original_target,
0fab64a3
MM
7470 tmode, modifier,
7471 alt_rtl);
c70eaeaf
KG
7472 else
7473 return expand_builtin (exp, target, subtarget, tmode, ignore);
7474 }
d6a5ac33 7475
8129842c 7476 return expand_call (exp, target, ignore);
bbf6f052
RK
7477
7478 case NON_LVALUE_EXPR:
7479 case NOP_EXPR:
7480 case CONVERT_EXPR:
7481 case REFERENCE_EXPR:
4a53008b 7482 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7483 return const0_rtx;
4a53008b 7484
bbf6f052
RK
7485 if (TREE_CODE (type) == UNION_TYPE)
7486 {
7487 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7488
c3d32120
RK
7489 /* If both input and output are BLKmode, this conversion isn't doing
7490 anything except possibly changing memory attribute. */
7491 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7492 {
7493 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7494 modifier);
7495
7496 result = copy_rtx (result);
7497 set_mem_attributes (result, exp, 0);
7498 return result;
7499 }
14a774a9 7500
bbf6f052 7501 if (target == 0)
cf7cb67e
JH
7502 {
7503 if (TYPE_MODE (type) != BLKmode)
7504 target = gen_reg_rtx (TYPE_MODE (type));
7505 else
7506 target = assign_temp (type, 0, 1, 1);
7507 }
d6a5ac33 7508
bbf6f052
RK
7509 if (GET_CODE (target) == MEM)
7510 /* Store data into beginning of memory target. */
7511 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7512 adjust_address (target, TYPE_MODE (valtype), 0),
7513 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7514
bbf6f052
RK
7515 else if (GET_CODE (target) == REG)
7516 /* Store this field into a union of the proper type. */
14a774a9
RK
7517 store_field (target,
7518 MIN ((int_size_in_bytes (TREE_TYPE
7519 (TREE_OPERAND (exp, 0)))
7520 * BITS_PER_UNIT),
8752c357 7521 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7522 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7523 VOIDmode, 0, type, 0);
bbf6f052
RK
7524 else
7525 abort ();
7526
7527 /* Return the entire union. */
7528 return target;
7529 }
d6a5ac33 7530
7f62854a
RK
7531 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7532 {
7533 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7534 modifier);
7f62854a
RK
7535
7536 /* If the signedness of the conversion differs and OP0 is
7537 a promoted SUBREG, clear that indication since we now
7538 have to do the proper extension. */
7539 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7540 && GET_CODE (op0) == SUBREG)
7541 SUBREG_PROMOTED_VAR_P (op0) = 0;
7542
7543 return op0;
7544 }
7545
fdf473ae 7546 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7547 if (GET_MODE (op0) == mode)
7548 return op0;
12342f90 7549
d6a5ac33
RK
7550 /* If OP0 is a constant, just convert it into the proper mode. */
7551 if (CONSTANT_P (op0))
fdf473ae
RH
7552 {
7553 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7554 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7555
0fb7aeda 7556 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7557 return simplify_gen_subreg (mode, op0, inner_mode,
7558 subreg_lowpart_offset (mode,
7559 inner_mode));
7560 else
7561 return convert_modes (mode, inner_mode, op0,
7562 TREE_UNSIGNED (inner_type));
7563 }
12342f90 7564
26fcb35a 7565 if (modifier == EXPAND_INITIALIZER)
38a448ca 7566 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7567
bbf6f052 7568 if (target == 0)
d6a5ac33
RK
7569 return
7570 convert_to_mode (mode, op0,
7571 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7572 else
d6a5ac33
RK
7573 convert_move (target, op0,
7574 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7575 return target;
7576
ed239f5a 7577 case VIEW_CONVERT_EXPR:
37a08a29 7578 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7579
7580 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7581 Otherwise, if neither mode is BLKmode and both are integral and within
7582 a word, we can use gen_lowpart. If neither is true, make sure the
7583 operand is in memory and convert the MEM to the new mode. */
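	/* Illustrative cases: if the operand is already accessed in
	   TYPE_MODE (type) (say an int viewed as a same-sized enum, both
	   SImode), nothing needs to be done; two distinct integer modes
	   that both fit in a word can go through gen_lowpart; but viewing
	   a float as an int (SFmode is not MODE_INT) takes the last path,
	   so the operand is spilled to memory if necessary and the MEM is
	   then accessed in the new mode.  */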
ed239f5a
RK
7584 if (TYPE_MODE (type) == GET_MODE (op0))
7585 ;
7586 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7587 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7588 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7589 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7590 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7591 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7592 else if (GET_CODE (op0) != MEM)
ed239f5a 7593 {
c11c10d8
RK
7594 /* If the operand is not a MEM, force it into memory. Since we
7595 are going to be changing the mode of the MEM, don't call
7596 force_const_mem for constants because we don't allow pool
7597 constants to change mode. */
ed239f5a 7598 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7599
c11c10d8
RK
7600 if (TREE_ADDRESSABLE (exp))
7601 abort ();
ed239f5a 7602
c11c10d8
RK
7603 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7604 target
7605 = assign_stack_temp_for_type
7606 (TYPE_MODE (inner_type),
7607 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7608
c11c10d8
RK
7609 emit_move_insn (target, op0);
7610 op0 = target;
ed239f5a
RK
7611 }
7612
c11c10d8
RK
7613 /* At this point, OP0 is in the correct mode. If the output type is such
7614 that the operand is known to be aligned, indicate that it is.
7615 Otherwise, we need only be concerned about alignment for non-BLKmode
7616 results. */
ed239f5a
RK
7617 if (GET_CODE (op0) == MEM)
7618 {
7619 op0 = copy_rtx (op0);
7620
ed239f5a
RK
7621 if (TYPE_ALIGN_OK (type))
7622 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7623 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7624 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7625 {
7626 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7627 HOST_WIDE_INT temp_size
7628 = MAX (int_size_in_bytes (inner_type),
7629 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7630 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7631 temp_size, 0, type);
c4e59f51 7632 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7633
c11c10d8
RK
7634 if (TREE_ADDRESSABLE (exp))
7635 abort ();
7636
ed239f5a
RK
7637 if (GET_MODE (op0) == BLKmode)
7638 emit_block_move (new_with_op0_mode, op0,
44bb111a 7639 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7640 (modifier == EXPAND_STACK_PARM
7641 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7642 else
7643 emit_move_insn (new_with_op0_mode, op0);
7644
7645 op0 = new;
7646 }
0fb7aeda 7647
c4e59f51 7648 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7649 }
7650
7651 return op0;
7652
bbf6f052 7653 case PLUS_EXPR:
91ce572a 7654 this_optab = ! unsignedp && flag_trapv
a9785c70 7655 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7656 ? addv_optab : add_optab;
bbf6f052
RK
7657
7658 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7659 something else, make sure we add the register to the constant and
7660 then to the other thing. This case can occur during strength
7661 reduction and doing it this way will produce better code if the
7662 frame pointer or argument pointer is eliminated.
7663
7664 fold-const.c will ensure that the constant is always in the inner
7665 PLUS_EXPR, so the only case we need to do anything about is if
7666 sp, ap, or fp is our second argument, in which case we must swap
7667 the innermost first argument and our second argument. */
7668
7669 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7670 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7671 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7672 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7673 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7674 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7675 {
7676 tree t = TREE_OPERAND (exp, 1);
7677
7678 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7679 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7680 }
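	 /* For instance, (x + 4) + fp is rewritten here as (fp + 4) + x,
	    so that the pointer register and the constant are combined
	    first, which produces better code when the frame or argument
	    pointer is later eliminated.  */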
7681
88f63c77 7682 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7683 something, we might be forming a constant. So try to use
7684 plus_constant. If it produces a sum and we can't accept it,
7685 use force_operand. This allows P = &ARR[const] to generate
7686 efficient code on machines where a SYMBOL_REF is not a valid
7687 address.
7688
7689 If this is an EXPAND_SUM call, always return the sum. */
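	 /* Illustrative case: for P = &ARR[2] with 4-byte elements,
	    plus_constant can fold the address to
	    (const (plus (symbol_ref "ARR") (const_int 8))), so no add
	    instruction is emitted; force_operand is only called when such
	    a sum cannot be accepted in the current context.  */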
c980ac49 7690 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7691 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7692 {
8403445a
AM
7693 if (modifier == EXPAND_STACK_PARM)
7694 target = 0;
c980ac49
RS
7695 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7696 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7697 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7698 {
cbbc503e
JL
7699 rtx constant_part;
7700
c980ac49
RS
7701 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7702 EXPAND_SUM);
cbbc503e
JL
7703 /* Use immed_double_const to ensure that the constant is
7704 truncated according to the mode of OP1, then sign extended
7705 to a HOST_WIDE_INT. Using the constant directly can result
7706 in non-canonical RTL in a 64x32 cross compile. */
7707 constant_part
7708 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7709 (HOST_WIDE_INT) 0,
a5efcd63 7710 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7711 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7712 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7713 op1 = force_operand (op1, target);
7714 return op1;
7715 }
bbf6f052 7716
c980ac49
RS
7717 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7718 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7719 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7720 {
cbbc503e
JL
7721 rtx constant_part;
7722
c980ac49 7723 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7724 (modifier == EXPAND_INITIALIZER
7725 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7726 if (! CONSTANT_P (op0))
7727 {
7728 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7729 VOIDmode, modifier);
f0e9957a
RS
7730 /* Return a PLUS if modifier says it's OK. */
7731 if (modifier == EXPAND_SUM
7732 || modifier == EXPAND_INITIALIZER)
7733 return simplify_gen_binary (PLUS, mode, op0, op1);
7734 goto binop2;
c980ac49 7735 }
cbbc503e
JL
7736 /* Use immed_double_const to ensure that the constant is
7737 truncated according to the mode of OP1, then sign extended
7738 to a HOST_WIDE_INT. Using the constant directly can result
7739 in non-canonical RTL in a 64x32 cross compile. */
7740 constant_part
7741 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7742 (HOST_WIDE_INT) 0,
2a94e396 7743 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7744 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7745 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7746 op0 = force_operand (op0, target);
7747 return op0;
7748 }
bbf6f052
RK
7749 }
7750
7751 /* No sense saving up arithmetic to be done
7752 if it's all in the wrong mode to form part of an address.
7753 And force_operand won't know whether to sign-extend or
7754 zero-extend. */
7755 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7756 || mode != ptr_mode)
4ef7870a 7757 {
eb698c58
RS
7758 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7759 subtarget, &op0, &op1, 0);
6e7727eb
EB
7760 if (op0 == const0_rtx)
7761 return op1;
7762 if (op1 == const0_rtx)
7763 return op0;
4ef7870a
EB
7764 goto binop2;
7765 }
bbf6f052 7766
eb698c58
RS
7767 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7768 subtarget, &op0, &op1, modifier);
f0e9957a 7769 return simplify_gen_binary (PLUS, mode, op0, op1);
bbf6f052
RK
7770
7771 case MINUS_EXPR:
ea87523e
RK
7772 /* For initializers, we are allowed to return a MINUS of two
7773 symbolic constants. Here we handle all cases when both operands
7774 are constant. */
bbf6f052
RK
7775 /* Handle difference of two symbolic constants,
7776 for the sake of an initializer. */
7777 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7778 && really_constant_p (TREE_OPERAND (exp, 0))
7779 && really_constant_p (TREE_OPERAND (exp, 1)))
7780 {
eb698c58
RS
7781 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7782 NULL_RTX, &op0, &op1, modifier);
ea87523e 7783
ea87523e
RK
7784 /* If the last operand is a CONST_INT, use plus_constant of
7785 the negated constant. Else make the MINUS. */
7786 if (GET_CODE (op1) == CONST_INT)
7787 return plus_constant (op0, - INTVAL (op1));
7788 else
38a448ca 7789 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7790 }
ae431183 7791
91ce572a
CC
7792 this_optab = ! unsignedp && flag_trapv
7793 && (GET_MODE_CLASS(mode) == MODE_INT)
7794 ? subv_optab : sub_optab;
1717e19e
UW
7795
7796 /* No sense saving up arithmetic to be done
7797 if it's all in the wrong mode to form part of an address.
7798 And force_operand won't know whether to sign-extend or
7799 zero-extend. */
7800 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7801 || mode != ptr_mode)
7802 goto binop;
7803
eb698c58
RS
7804 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7805 subtarget, &op0, &op1, modifier);
1717e19e
UW
7806
7807 /* Convert A - const to A + (-const). */
7808 if (GET_CODE (op1) == CONST_INT)
7809 {
7810 op1 = negate_rtx (mode, op1);
f0e9957a 7811 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7812 }
7813
7814 goto binop2;
bbf6f052
RK
7815
7816 case MULT_EXPR:
bbf6f052
RK
7817 /* If first operand is constant, swap them.
7818 Thus the following special case checks need only
7819 check the second operand. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7821 {
b3694847 7822 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7823 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7824 TREE_OPERAND (exp, 1) = t1;
7825 }
7826
7827 /* Attempt to return something suitable for generating an
7828 indexed address, for machines that support that. */
7829
88f63c77 7830 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7831 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7832 {
48a5f2fa
DJ
7833 tree exp1 = TREE_OPERAND (exp, 1);
7834
921b3427
RK
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7836 EXPAND_SUM);
bbf6f052 7837
bbf6f052 7838 if (GET_CODE (op0) != REG)
906c4e36 7839 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7840 if (GET_CODE (op0) != REG)
7841 op0 = copy_to_mode_reg (mode, op0);
7842
48a5f2fa
DJ
7843 return gen_rtx_MULT (mode, op0,
7844 gen_int_mode (tree_low_cst (exp1, 0),
7845 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7846 }
7847
8403445a
AM
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7850
bbf6f052
RK
7851 /* Check for multiplying things that have been extended
7852 from a narrower type. If this machine supports multiplying
7853 in that narrower type with a result in the desired type,
7854 do it that way, and avoid the explicit type-conversion. */
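 /* Illustrative example (not part of the original source): with 32-bit
 ints and a 64-bit result type, (long long) a * (long long) b can use a
 32x32->64 widening multiply instruction when the target provides one,
 instead of first extending both operands to 64 bits. */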
7855 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7856 && TREE_CODE (type) == INTEGER_TYPE
7857 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7858 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7859 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7860 && int_fits_type_p (TREE_OPERAND (exp, 1),
7861 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7862 /* Don't use a widening multiply if a shift will do. */
7863 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7864 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7865 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7866 ||
7867 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7868 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7869 ==
7870 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7871 /* If both operands are extended, they must either both
7872 be zero-extended or both be sign-extended. */
7873 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7874 ==
7875 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7876 {
888d65b5
RS
7877 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7878 enum machine_mode innermode = TYPE_MODE (op0type);
7879 bool zextend_p = TREE_UNSIGNED (op0type);
7880 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7881 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7882
b10af0c8 7883 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7884 {
b10af0c8
TG
7885 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7886 {
b10af0c8 7887 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7888 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7889 TREE_OPERAND (exp, 1),
7890 NULL_RTX, &op0, &op1, 0);
b10af0c8 7891 else
eb698c58
RS
7892 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7893 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7894 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7895 goto binop2;
7896 }
7897 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7898 && innermode == word_mode)
7899 {
888d65b5 7900 rtx htem, hipart;
b10af0c8
TG
7901 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7902 NULL_RTX, VOIDmode, 0);
7903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7904 op1 = convert_modes (innermode, mode,
7905 expand_expr (TREE_OPERAND (exp, 1),
7906 NULL_RTX, VOIDmode, 0),
7907 unsignedp);
b10af0c8
TG
7908 else
7909 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7910 NULL_RTX, VOIDmode, 0);
7911 temp = expand_binop (mode, other_optab, op0, op1, target,
7912 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7913 hipart = gen_highpart (innermode, temp);
7914 htem = expand_mult_highpart_adjust (innermode, hipart,
7915 op0, op1, hipart,
7916 zextend_p);
7917 if (htem != hipart)
7918 emit_move_insn (hipart, htem);
b10af0c8
TG
7919 return temp;
7920 }
bbf6f052
RK
7921 }
7922 }
eb698c58
RS
7923 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7924 subtarget, &op0, &op1, 0);
bbf6f052
RK
7925 return expand_mult (mode, op0, op1, target, unsignedp);
7926
7927 case TRUNC_DIV_EXPR:
7928 case FLOOR_DIV_EXPR:
7929 case CEIL_DIV_EXPR:
7930 case ROUND_DIV_EXPR:
7931 case EXACT_DIV_EXPR:
8403445a
AM
7932 if (modifier == EXPAND_STACK_PARM)
7933 target = 0;
bbf6f052
RK
7934 /* Possible optimization: compute the dividend with EXPAND_SUM
 7935 then, if the divisor is constant, we can optimize the case
 7936 where some terms of the dividend have coefficients divisible by it. */
eb698c58
RS
7937 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7938 subtarget, &op0, &op1, 0);
bbf6f052
RK
7939 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7940
7941 case RDIV_EXPR:
b7e9703c
JH
 7942 /* Emit a/b as a*(1/b). Later passes may manage to CSE the reciprocal,
 7943 saving an expensive divide. If not, combine will rebuild the
 7944 original computation. */
7945 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7946 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7947 && !real_onep (TREE_OPERAND (exp, 0)))
7948 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7949 build (RDIV_EXPR, type,
7950 build_real (type, dconst1),
7951 TREE_OPERAND (exp, 1))),
8e37cba8 7952 target, tmode, modifier);
ef89d648 7953 this_optab = sdiv_optab;
bbf6f052
RK
7954 goto binop;
7955
7956 case TRUNC_MOD_EXPR:
7957 case FLOOR_MOD_EXPR:
7958 case CEIL_MOD_EXPR:
7959 case ROUND_MOD_EXPR:
8403445a
AM
7960 if (modifier == EXPAND_STACK_PARM)
7961 target = 0;
eb698c58
RS
7962 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7963 subtarget, &op0, &op1, 0);
bbf6f052
RK
7964 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7965
7966 case FIX_ROUND_EXPR:
7967 case FIX_FLOOR_EXPR:
7968 case FIX_CEIL_EXPR:
7969 abort (); /* Not used for C. */
7970
7971 case FIX_TRUNC_EXPR:
906c4e36 7972 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7973 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7974 target = gen_reg_rtx (mode);
7975 expand_fix (target, op0, unsignedp);
7976 return target;
7977
7978 case FLOAT_EXPR:
906c4e36 7979 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7980 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7981 target = gen_reg_rtx (mode);
7982 /* expand_float can't figure out what to do if FROM has VOIDmode.
7983 So give it the correct mode. With -O, cse will optimize this. */
7984 if (GET_MODE (op0) == VOIDmode)
7985 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7986 op0);
7987 expand_float (target, op0,
7988 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7989 return target;
7990
7991 case NEGATE_EXPR:
5b22bee8 7992 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7993 if (modifier == EXPAND_STACK_PARM)
7994 target = 0;
91ce572a 7995 temp = expand_unop (mode,
0fb7aeda
KH
7996 ! unsignedp && flag_trapv
7997 && (GET_MODE_CLASS(mode) == MODE_INT)
7998 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7999 if (temp == 0)
8000 abort ();
8001 return temp;
8002
8003 case ABS_EXPR:
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8005 if (modifier == EXPAND_STACK_PARM)
8006 target = 0;
bbf6f052 8007
11017cc7 8008 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
8009 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8010 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 8011 abort ();
2d7050fd 8012
bbf6f052
RK
8013 /* Unsigned abs is simply the operand. Testing here means we don't
8014 risk generating incorrect code below. */
8015 if (TREE_UNSIGNED (type))
8016 return op0;
8017
91ce572a 8018 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8019 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8020
8021 case MAX_EXPR:
8022 case MIN_EXPR:
8023 target = original_target;
8403445a
AM
8024 if (target == 0
8025 || modifier == EXPAND_STACK_PARM
fc155707 8026 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8027 || GET_MODE (target) != mode
bbf6f052
RK
8028 || (GET_CODE (target) == REG
8029 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8030 target = gen_reg_rtx (mode);
eb698c58
RS
8031 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8032 target, &op0, &op1, 0);
bbf6f052
RK
8033
8034 /* First try to do it with a special MIN or MAX instruction.
8035 If that does not win, use a conditional jump to select the proper
8036 value. */
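 /* Rough shape of the fallback emitted below (sketch only, not part of
 the original comment):
 target = op0; if (target compares favourably against op1) goto lab;
 target = op1; lab:
 with the comparison emitted by do_compare_rtx_and_jump, or word by
 word for integer modes too wide to compare directly. */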
8037 this_optab = (TREE_UNSIGNED (type)
8038 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8039 : (code == MIN_EXPR ? smin_optab : smax_optab));
8040
8041 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8042 OPTAB_WIDEN);
8043 if (temp != 0)
8044 return temp;
8045
fa2981d8
JW
8046 /* At this point, a MEM target is no longer useful; we will get better
8047 code without it. */
3a94c984 8048
fa2981d8
JW
8049 if (GET_CODE (target) == MEM)
8050 target = gen_reg_rtx (mode);
8051
e3be1116
RS
8052 /* If op1 was placed in target, swap op0 and op1. */
8053 if (target != op0 && target == op1)
8054 {
8055 rtx tem = op0;
8056 op0 = op1;
8057 op1 = tem;
8058 }
8059
ee456b1c
RK
8060 if (target != op0)
8061 emit_move_insn (target, op0);
d6a5ac33 8062
bbf6f052 8063 op0 = gen_label_rtx ();
d6a5ac33 8064
f81497d9
RS
8065 /* If this mode is an integer too wide to compare properly,
8066 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8067 if (GET_MODE_CLASS (mode) == MODE_INT
8068 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8069 {
f81497d9 8070 if (code == MAX_EXPR)
d6a5ac33
RK
8071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8072 target, op1, NULL_RTX, op0);
bbf6f052 8073 else
d6a5ac33
RK
8074 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8075 op1, target, NULL_RTX, op0);
bbf6f052 8076 }
f81497d9
RS
8077 else
8078 {
b30f05db
BS
8079 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8080 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 8081 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 8082 op0);
f81497d9 8083 }
b30f05db 8084 emit_move_insn (target, op1);
bbf6f052
RK
8085 emit_label (op0);
8086 return target;
8087
bbf6f052
RK
8088 case BIT_NOT_EXPR:
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8090 if (modifier == EXPAND_STACK_PARM)
8091 target = 0;
bbf6f052
RK
8092 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8093 if (temp == 0)
8094 abort ();
8095 return temp;
8096
d6a5ac33
RK
8097 /* ??? Can optimize bitwise operations with one arg constant.
8098 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8099 and (a bitwise1 b) bitwise2 b (etc)
8100 but that is probably not worth while. */
8101
8102 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8103 boolean values when we want in all cases to compute both of them. In
8104 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8105 as actual zero-or-1 values and then bitwise anding. In cases where
8106 there cannot be any side effects, better code would be made by
8107 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8108 how to recognize those cases. */
8109
bbf6f052
RK
8110 case TRUTH_AND_EXPR:
8111 case BIT_AND_EXPR:
8112 this_optab = and_optab;
8113 goto binop;
8114
bbf6f052
RK
8115 case TRUTH_OR_EXPR:
8116 case BIT_IOR_EXPR:
8117 this_optab = ior_optab;
8118 goto binop;
8119
874726a8 8120 case TRUTH_XOR_EXPR:
bbf6f052
RK
8121 case BIT_XOR_EXPR:
8122 this_optab = xor_optab;
8123 goto binop;
8124
8125 case LSHIFT_EXPR:
8126 case RSHIFT_EXPR:
8127 case LROTATE_EXPR:
8128 case RROTATE_EXPR:
e5e809f4 8129 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8130 subtarget = 0;
8403445a
AM
8131 if (modifier == EXPAND_STACK_PARM)
8132 target = 0;
bbf6f052
RK
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8134 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8135 unsignedp);
8136
d6a5ac33
RK
8137 /* Could determine the answer when only additive constants differ. Also,
8138 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8139 case LT_EXPR:
8140 case LE_EXPR:
8141 case GT_EXPR:
8142 case GE_EXPR:
8143 case EQ_EXPR:
8144 case NE_EXPR:
1eb8759b
RH
8145 case UNORDERED_EXPR:
8146 case ORDERED_EXPR:
8147 case UNLT_EXPR:
8148 case UNLE_EXPR:
8149 case UNGT_EXPR:
8150 case UNGE_EXPR:
8151 case UNEQ_EXPR:
8403445a
AM
8152 temp = do_store_flag (exp,
8153 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8154 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8155 if (temp != 0)
8156 return temp;
d6a5ac33 8157
0f41302f 8158 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
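 /* Illustrative shape of the code emitted below (not part of the
 original comment): temp = foo; if (temp == 0) goto L; temp = 1; L:
 so any nonzero value of foo is normalized to 1 without needing a
 store-flag instruction. */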
bbf6f052
RK
8159 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8160 && original_target
8161 && GET_CODE (original_target) == REG
8162 && (GET_MODE (original_target)
8163 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8164 {
d6a5ac33
RK
8165 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8166 VOIDmode, 0);
8167
c0a3eeac
UW
8168 /* If temp is constant, we can just compute the result. */
8169 if (GET_CODE (temp) == CONST_INT)
8170 {
8171 if (INTVAL (temp) != 0)
8172 emit_move_insn (target, const1_rtx);
8173 else
8174 emit_move_insn (target, const0_rtx);
8175
8176 return target;
8177 }
8178
bbf6f052 8179 if (temp != original_target)
c0a3eeac
UW
8180 {
8181 enum machine_mode mode1 = GET_MODE (temp);
8182 if (mode1 == VOIDmode)
8183 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8184
c0a3eeac
UW
8185 temp = copy_to_mode_reg (mode1, temp);
8186 }
d6a5ac33 8187
bbf6f052 8188 op1 = gen_label_rtx ();
c5d5d461 8189 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8190 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8191 emit_move_insn (temp, const1_rtx);
8192 emit_label (op1);
8193 return temp;
8194 }
d6a5ac33 8195
bbf6f052
RK
8196 /* If no set-flag instruction, must generate a conditional
8197 store into a temporary variable. Drop through
8198 and handle this like && and ||. */
8199
8200 case TRUTH_ANDIF_EXPR:
8201 case TRUTH_ORIF_EXPR:
e44842fe 8202 if (! ignore
8403445a
AM
8203 && (target == 0
8204 || modifier == EXPAND_STACK_PARM
8205 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8206 /* Make sure we don't have a hard reg (such as function's return
8207 value) live across basic blocks, if not optimizing. */
8208 || (!optimize && GET_CODE (target) == REG
8209 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8210 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8211
8212 if (target)
8213 emit_clr_insn (target);
8214
bbf6f052
RK
8215 op1 = gen_label_rtx ();
8216 jumpifnot (exp, op1);
e44842fe
RK
8217
8218 if (target)
8219 emit_0_to_1_insn (target);
8220
bbf6f052 8221 emit_label (op1);
e44842fe 8222 return ignore ? const0_rtx : target;
bbf6f052
RK
8223
8224 case TRUTH_NOT_EXPR:
8403445a
AM
8225 if (modifier == EXPAND_STACK_PARM)
8226 target = 0;
bbf6f052
RK
8227 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8228 /* The parser is careful to generate TRUTH_NOT_EXPR
8229 only with operands that are always zero or one. */
906c4e36 8230 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8231 target, 1, OPTAB_LIB_WIDEN);
8232 if (temp == 0)
8233 abort ();
8234 return temp;
8235
8236 case COMPOUND_EXPR:
8237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8238 emit_queue ();
0fab64a3
MM
8239 return expand_expr_real (TREE_OPERAND (exp, 1),
8240 (ignore ? const0_rtx : target),
8241 VOIDmode, modifier, alt_rtl);
bbf6f052
RK
8242
8243 case COND_EXPR:
ac01eace
RK
8244 /* If we would have a "singleton" (see below) were it not for a
8245 conversion in each arm, bring that conversion back out. */
8246 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8247 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8248 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8249 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8250 {
d6edb99e
ZW
8251 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8252 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8253
8254 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8255 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8256 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8257 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8258 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8259 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8260 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8261 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8262 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8263 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8264 TREE_OPERAND (exp, 0),
d6edb99e 8265 iftrue, iffalse)),
ac01eace
RK
8266 target, tmode, modifier);
8267 }
8268
bbf6f052
RK
8269 {
8270 /* Note that COND_EXPRs whose type is a structure or union
8271 are required to be constructed to contain assignments of
8272 a temporary variable, so that we can evaluate them here
8273 for side effect only. If type is void, we must do likewise. */
8274
8275 /* If an arm of the branch requires a cleanup,
8276 only that cleanup is performed. */
8277
8278 tree singleton = 0;
8279 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8280
8281 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8282 convert it to our mode, if necessary. */
8283 if (integer_onep (TREE_OPERAND (exp, 1))
8284 && integer_zerop (TREE_OPERAND (exp, 2))
8285 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8286 {
dd27116b
RK
8287 if (ignore)
8288 {
8289 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8290 modifier);
dd27116b
RK
8291 return const0_rtx;
8292 }
8293
8403445a
AM
8294 if (modifier == EXPAND_STACK_PARM)
8295 target = 0;
37a08a29 8296 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8297 if (GET_MODE (op0) == mode)
8298 return op0;
d6a5ac33 8299
bbf6f052
RK
8300 if (target == 0)
8301 target = gen_reg_rtx (mode);
8302 convert_move (target, op0, unsignedp);
8303 return target;
8304 }
8305
ac01eace
RK
8306 /* Check for X ? A + B : A. If we have this, we can copy A to the
8307 output and conditionally add B. Similarly for unary operations.
8308 Don't do this if X has side-effects because those side effects
8309 might affect A or B and the "?" operation is a sequence point in
8310 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8311
8312 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8313 && operand_equal_p (TREE_OPERAND (exp, 2),
8314 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8315 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8316 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8317 && operand_equal_p (TREE_OPERAND (exp, 1),
8318 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8319 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8320 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8321 && operand_equal_p (TREE_OPERAND (exp, 2),
8322 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8323 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8324 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8325 && operand_equal_p (TREE_OPERAND (exp, 1),
8326 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8327 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8328
01c8a7c8
RK
8329 /* If we are not to produce a result, we have no target. Otherwise,
8330 if a target was specified use it; it will not be used as an
3a94c984 8331 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8332 temporary. */
8333
8334 if (ignore)
8335 temp = 0;
8403445a
AM
8336 else if (modifier == EXPAND_STACK_PARM)
8337 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8338 else if (original_target
e5e809f4 8339 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8340 || (singleton && GET_CODE (original_target) == REG
8341 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8342 && original_target == var_rtx (singleton)))
8343 && GET_MODE (original_target) == mode
7c00d1fe
RK
8344#ifdef HAVE_conditional_move
8345 && (! can_conditionally_move_p (mode)
8346 || GET_CODE (original_target) == REG
8347 || TREE_ADDRESSABLE (type))
8348#endif
8125d7e9
BS
8349 && (GET_CODE (original_target) != MEM
8350 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8351 temp = original_target;
8352 else if (TREE_ADDRESSABLE (type))
8353 abort ();
8354 else
8355 temp = assign_temp (type, 0, 0, 1);
8356
ac01eace
RK
8357 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8358 do the test of X as a store-flag operation, do this as
8359 A + ((X != 0) << log C). Similarly for other simple binary
8360 operators. Only do for C == 1 if BRANCH_COST is low. */
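 /* Illustrative example (not part of the original comment): for
 "x ? a + 4 : a" this path emits a + ((x != 0) << 2), using the 0/1
 store-flag result shifted by log2 (4), and so avoids a branch. */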
dd27116b 8361 if (temp && singleton && binary_op
bbf6f052
RK
8362 && (TREE_CODE (binary_op) == PLUS_EXPR
8363 || TREE_CODE (binary_op) == MINUS_EXPR
8364 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8365 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8366 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8367 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8368 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8369 {
8370 rtx result;
61f6c84f 8371 tree cond;
91ce572a 8372 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8373 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8374 ? addv_optab : add_optab)
8375 : TREE_CODE (binary_op) == MINUS_EXPR
8376 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8377 ? subv_optab : sub_optab)
8378 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8379 : xor_optab);
bbf6f052 8380
61f6c84f 8381 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8382 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8383 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8384 else
8385 cond = TREE_OPERAND (exp, 0);
bbf6f052 8386
61f6c84f
JJ
8387 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8388 ? temp : NULL_RTX),
bbf6f052
RK
8389 mode, BRANCH_COST <= 1);
8390
ac01eace
RK
8391 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8392 result = expand_shift (LSHIFT_EXPR, mode, result,
8393 build_int_2 (tree_log2
8394 (TREE_OPERAND
8395 (binary_op, 1)),
8396 0),
e5e809f4 8397 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8398 ? temp : NULL_RTX), 0);
8399
bbf6f052
RK
8400 if (result)
8401 {
906c4e36 8402 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8403 return expand_binop (mode, boptab, op1, result, temp,
8404 unsignedp, OPTAB_LIB_WIDEN);
8405 }
bbf6f052 8406 }
3a94c984 8407
dabf8373 8408 do_pending_stack_adjust ();
bbf6f052
RK
8409 NO_DEFER_POP;
8410 op0 = gen_label_rtx ();
8411
8412 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8413 {
8414 if (temp != 0)
8415 {
8416 /* If the target conflicts with the other operand of the
8417 binary op, we can't use it. Also, we can't use the target
8418 if it is a hard register, because evaluating the condition
8419 might clobber it. */
8420 if ((binary_op
e5e809f4 8421 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8422 || (GET_CODE (temp) == REG
8423 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8424 temp = gen_reg_rtx (mode);
8403445a
AM
8425 store_expr (singleton, temp,
8426 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8427 }
8428 else
906c4e36 8429 expand_expr (singleton,
2937cf87 8430 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8431 if (singleton == TREE_OPERAND (exp, 1))
8432 jumpif (TREE_OPERAND (exp, 0), op0);
8433 else
8434 jumpifnot (TREE_OPERAND (exp, 0), op0);
8435
956d6950 8436 start_cleanup_deferral ();
bbf6f052
RK
8437 if (binary_op && temp == 0)
8438 /* Just touch the other operand. */
8439 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8441 else if (binary_op)
8442 store_expr (build (TREE_CODE (binary_op), type,
8443 make_tree (type, temp),
8444 TREE_OPERAND (binary_op, 1)),
8403445a 8445 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8446 else
8447 store_expr (build1 (TREE_CODE (unary_op), type,
8448 make_tree (type, temp)),
8403445a 8449 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8450 op1 = op0;
bbf6f052 8451 }
bbf6f052
RK
8452 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8453 comparison operator. If we have one of these cases, set the
8454 output to A, branch on A (cse will merge these two references),
8455 then set the output to FOO. */
8456 else if (temp
8457 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8458 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8459 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8460 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8461 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8462 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8463 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8464 {
3a94c984
KH
8465 if (GET_CODE (temp) == REG
8466 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8467 temp = gen_reg_rtx (mode);
8403445a
AM
8468 store_expr (TREE_OPERAND (exp, 1), temp,
8469 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8470 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8471
956d6950 8472 start_cleanup_deferral ();
c37b68d4
RS
8473 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8474 store_expr (TREE_OPERAND (exp, 2), temp,
8475 modifier == EXPAND_STACK_PARM ? 2 : 0);
8476 else
8477 expand_expr (TREE_OPERAND (exp, 2),
8478 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8479 op1 = op0;
8480 }
8481 else if (temp
8482 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8483 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8484 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8485 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8486 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8487 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8488 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8489 {
3a94c984
KH
8490 if (GET_CODE (temp) == REG
8491 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8492 temp = gen_reg_rtx (mode);
8403445a
AM
8493 store_expr (TREE_OPERAND (exp, 2), temp,
8494 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8495 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8496
956d6950 8497 start_cleanup_deferral ();
c37b68d4
RS
8498 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8499 store_expr (TREE_OPERAND (exp, 1), temp,
8500 modifier == EXPAND_STACK_PARM ? 2 : 0);
8501 else
8502 expand_expr (TREE_OPERAND (exp, 1),
8503 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8504 op1 = op0;
8505 }
8506 else
8507 {
8508 op1 = gen_label_rtx ();
8509 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8510
956d6950 8511 start_cleanup_deferral ();
3a94c984 8512
2ac84cfe 8513 /* One branch of the cond can be void, if it never returns. For
3a94c984 8514 example A ? throw : E */
2ac84cfe 8515 if (temp != 0
3a94c984 8516 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8517 store_expr (TREE_OPERAND (exp, 1), temp,
8518 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8519 else
906c4e36
RK
8520 expand_expr (TREE_OPERAND (exp, 1),
8521 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8522 end_cleanup_deferral ();
bbf6f052
RK
8523 emit_queue ();
8524 emit_jump_insn (gen_jump (op1));
8525 emit_barrier ();
8526 emit_label (op0);
956d6950 8527 start_cleanup_deferral ();
2ac84cfe 8528 if (temp != 0
3a94c984 8529 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8530 store_expr (TREE_OPERAND (exp, 2), temp,
8531 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8532 else
906c4e36
RK
8533 expand_expr (TREE_OPERAND (exp, 2),
8534 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8535 }
8536
956d6950 8537 end_cleanup_deferral ();
bbf6f052
RK
8538
8539 emit_queue ();
8540 emit_label (op1);
8541 OK_DEFER_POP;
5dab5552 8542
bbf6f052
RK
8543 return temp;
8544 }
8545
8546 case TARGET_EXPR:
8547 {
8548 /* Something needs to be initialized, but we didn't know
8549 where that thing was when building the tree. For example,
8550 it could be the return value of a function, or a parameter
 8551 to a function which is laid out on the stack, or a temporary
8552 variable which must be passed by reference.
8553
8554 We guarantee that the expression will either be constructed
8555 or copied into our original target. */
8556
8557 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8558 tree cleanups = NULL_TREE;
5c062816 8559 tree exp1;
bbf6f052
RK
8560
8561 if (TREE_CODE (slot) != VAR_DECL)
8562 abort ();
8563
9c51f375
RK
8564 if (! ignore)
8565 target = original_target;
8566
6fbfac92
JM
8567 /* Set this here so that if we get a target that refers to a
8568 register variable that's already been used, put_reg_into_stack
3a94c984 8569 knows that it should fix up those uses. */
6fbfac92
JM
8570 TREE_USED (slot) = 1;
8571
bbf6f052
RK
8572 if (target == 0)
8573 {
19e7881c 8574 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8575 {
8576 target = DECL_RTL (slot);
5c062816 8577 /* If we have already expanded the slot, don't do
ac993f4f 8578 it again. (mrs) */
5c062816
MS
8579 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8580 return target;
ac993f4f 8581 }
bbf6f052
RK
8582 else
8583 {
e9a25f70 8584 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8585 /* All temp slots at this level must not conflict. */
8586 preserve_temp_slots (target);
19e7881c 8587 SET_DECL_RTL (slot, target);
e9a25f70 8588 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8589 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8590
e287fd6e
RK
8591 /* Since SLOT is not known to the called function
8592 to belong to its stack frame, we must build an explicit
8593 cleanup. This case occurs when we must build up a reference
8594 to pass the reference as an argument. In this case,
8595 it is very likely that such a reference need not be
8596 built here. */
8597
8598 if (TREE_OPERAND (exp, 2) == 0)
c88770e9
NB
8599 TREE_OPERAND (exp, 2)
8600 = (*lang_hooks.maybe_build_cleanup) (slot);
2a888d4c 8601 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8602 }
bbf6f052
RK
8603 }
8604 else
8605 {
8606 /* This case does occur, when expanding a parameter which
8607 needs to be constructed on the stack. The target
8608 is the actual stack address that we want to initialize.
8609 The function we call will perform the cleanup in this case. */
8610
8c042b47
RS
8611 /* If we have already assigned it space, use that space,
8612 not target that we were passed in, as our target
8613 parameter is only a hint. */
19e7881c 8614 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8615 {
8616 target = DECL_RTL (slot);
 8617 /* If we have already expanded the slot, don't do
8c042b47 8618 it again. (mrs) */
3a94c984
KH
8619 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8620 return target;
8c042b47 8621 }
21002281
JW
8622 else
8623 {
19e7881c 8624 SET_DECL_RTL (slot, target);
21002281
JW
8625 /* If we must have an addressable slot, then make sure that
8626 the RTL that we just stored in slot is OK. */
8627 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8628 put_var_into_stack (slot, /*rescan=*/true);
21002281 8629 }
bbf6f052
RK
8630 }
8631
4847c938 8632 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8633 /* Mark it as expanded. */
8634 TREE_OPERAND (exp, 1) = NULL_TREE;
8635
8403445a 8636 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8637
659e5a7a 8638 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8639
41531e5b 8640 return target;
bbf6f052
RK
8641 }
8642
8643 case INIT_EXPR:
8644 {
8645 tree lhs = TREE_OPERAND (exp, 0);
8646 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8647
b90f141a 8648 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8649 return temp;
8650 }
8651
8652 case MODIFY_EXPR:
8653 {
8654 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8655 That's so we don't compute a pointer and save it over a
8656 call. If lhs is simple, compute it first so we can give it
8657 as a target if the rhs is just a call. This avoids an
 8658 extra temp and copy, and prevents a partial subsumption
8659 which makes bad code. Actually we could treat
8660 component_ref's of vars like vars. */
bbf6f052
RK
8661
8662 tree lhs = TREE_OPERAND (exp, 0);
8663 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8664
8665 temp = 0;
8666
bbf6f052
RK
8667 /* Check for |= or &= of a bitfield of size one into another bitfield
8668 of size 1. In this case, (unless we need the result of the
8669 assignment) we can do this more efficiently with a
8670 test followed by an assignment, if necessary.
8671
8672 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8673 things change so we do, this code should be enhanced to
8674 support it. */
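 /* Illustrative example (not part of the original comment): for
 one-bit fields, "s.a |= s.b" becomes "if (s.b) s.a = 1;" and
 "s.a &= s.b" becomes "if (!s.b) s.a = 0;", avoiding a
 read-modify-write of the destination bit when the result of the
 assignment is not otherwise needed. */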
8675 if (ignore
8676 && TREE_CODE (lhs) == COMPONENT_REF
8677 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8678 || TREE_CODE (rhs) == BIT_AND_EXPR)
8679 && TREE_OPERAND (rhs, 0) == lhs
8680 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8681 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8682 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8683 {
8684 rtx label = gen_label_rtx ();
8685
8686 do_jump (TREE_OPERAND (rhs, 1),
8687 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8688 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8689 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8690 (TREE_CODE (rhs) == BIT_IOR_EXPR
8691 ? integer_one_node
8692 : integer_zero_node)),
b90f141a 8693 0);
e7c33f54 8694 do_pending_stack_adjust ();
bbf6f052
RK
8695 emit_label (label);
8696 return const0_rtx;
8697 }
8698
b90f141a 8699 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8700
bbf6f052
RK
8701 return temp;
8702 }
8703
6e7f84a7
APB
8704 case RETURN_EXPR:
8705 if (!TREE_OPERAND (exp, 0))
8706 expand_null_return ();
8707 else
8708 expand_return (TREE_OPERAND (exp, 0));
8709 return const0_rtx;
8710
bbf6f052
RK
8711 case PREINCREMENT_EXPR:
8712 case PREDECREMENT_EXPR:
7b8b9722 8713 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8714
8715 case POSTINCREMENT_EXPR:
8716 case POSTDECREMENT_EXPR:
8717 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8718 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8719
8720 case ADDR_EXPR:
8403445a
AM
8721 if (modifier == EXPAND_STACK_PARM)
8722 target = 0;
bbf6f052
RK
8723 /* Are we taking the address of a nested function? */
8724 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8725 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8726 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8727 && ! TREE_STATIC (exp))
bbf6f052
RK
8728 {
8729 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8730 op0 = force_operand (op0, target);
8731 }
682ba3a6
RK
8732 /* If we are taking the address of something erroneous, just
8733 return a zero. */
8734 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8735 return const0_rtx;
d6b6783b
RK
8736 /* If we are taking the address of a constant and are at the
8737 top level, we have to use output_constant_def since we can't
8738 call force_const_mem at top level. */
8739 else if (cfun == 0
8740 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8741 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8742 == 'c')))
8743 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8744 else
8745 {
e287fd6e
RK
8746 /* We make sure to pass const0_rtx down if we came in with
8747 ignore set, to avoid doing the cleanups twice for something. */
8748 op0 = expand_expr (TREE_OPERAND (exp, 0),
8749 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8750 (modifier == EXPAND_INITIALIZER
8751 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8752
119af78a
RK
8753 /* If we are going to ignore the result, OP0 will have been set
8754 to const0_rtx, so just return it. Don't get confused and
8755 think we are taking the address of the constant. */
8756 if (ignore)
8757 return op0;
8758
73b7f58c
BS
8759 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
 8760 clever and return a REG when given a MEM. */
8761 op0 = protect_from_queue (op0, 1);
3539e816 8762
c5c76735
JL
8763 /* We would like the object in memory. If it is a constant, we can
8764 have it be statically allocated into memory. For a non-constant,
8765 we need to allocate some memory and store the value into it. */
896102d0
RK
8766
8767 if (CONSTANT_P (op0))
8768 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8769 op0);
682ba3a6 8770 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8771 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8772 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8773 {
6c7d86ec
RK
8774 /* If the operand is a SAVE_EXPR, we can deal with this by
8775 forcing the SAVE_EXPR into memory. */
8776 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8777 {
f29a2bd1
MM
8778 put_var_into_stack (TREE_OPERAND (exp, 0),
8779 /*rescan=*/true);
6c7d86ec
RK
8780 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8781 }
df6018fd 8782 else
6c7d86ec
RK
8783 {
8784 /* If this object is in a register, it can't be BLKmode. */
8785 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8786 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8787
8788 if (GET_CODE (op0) == PARALLEL)
8789 /* Handle calls that pass values in multiple
8790 non-contiguous locations. The Irix 6 ABI has examples
8791 of this. */
6e985040 8792 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
8793 int_size_in_bytes (inner_type));
8794 else
8795 emit_move_insn (memloc, op0);
0fb7aeda 8796
6c7d86ec
RK
8797 op0 = memloc;
8798 }
896102d0
RK
8799 }
8800
bbf6f052
RK
8801 if (GET_CODE (op0) != MEM)
8802 abort ();
3a94c984 8803
34e81b5a 8804 mark_temp_addr_taken (op0);
bbf6f052 8805 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8806 {
34e81b5a 8807 op0 = XEXP (op0, 0);
5ae6cd0d 8808 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8809 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8810 return op0;
88f63c77 8811 }
987c71d9 8812
c952ff4b
RK
 8813 /* If OP0 is not aligned at least as much as the type requires, we
8814 need to make a temporary, copy OP0 to it, and take the address of
8815 the temporary. We want to use the alignment of the type, not of
8816 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8817 the test for BLKmode means that can't happen. The test for
8818 BLKmode is because we never make mis-aligned MEMs with
8819 non-BLKmode.
8820
8821 We don't need to do this at all if the machine doesn't have
8822 strict alignment. */
8823 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8824 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8825 > MEM_ALIGN (op0))
8826 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8827 {
8828 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8829 rtx new;
a06ef755 8830
c3d32120
RK
8831 if (TYPE_ALIGN_OK (inner_type))
8832 abort ();
8833
bdaa131b
JM
8834 if (TREE_ADDRESSABLE (inner_type))
8835 {
8836 /* We can't make a bitwise copy of this object, so fail. */
8837 error ("cannot take the address of an unaligned member");
8838 return const0_rtx;
8839 }
8840
8841 new = assign_stack_temp_for_type
8842 (TYPE_MODE (inner_type),
8843 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8844 : int_size_in_bytes (inner_type),
8845 1, build_qualified_type (inner_type,
8846 (TYPE_QUALS (inner_type)
8847 | TYPE_QUAL_CONST)));
8848
44bb111a 8849 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8850 (modifier == EXPAND_STACK_PARM
8851 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8852
a06ef755
RK
8853 op0 = new;
8854 }
8855
bbf6f052
RK
8856 op0 = force_operand (XEXP (op0, 0), target);
8857 }
987c71d9 8858
05c8e58b
HPN
8859 if (flag_force_addr
8860 && GET_CODE (op0) != REG
8861 && modifier != EXPAND_CONST_ADDRESS
8862 && modifier != EXPAND_INITIALIZER
8863 && modifier != EXPAND_SUM)
987c71d9
RK
8864 op0 = force_reg (Pmode, op0);
8865
dc6d66b3
RK
8866 if (GET_CODE (op0) == REG
8867 && ! REG_USERVAR_P (op0))
bdb429a5 8868 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8869
5ae6cd0d 8870 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8871 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8872
bbf6f052
RK
8873 return op0;
8874
8875 case ENTRY_VALUE_EXPR:
8876 abort ();
8877
7308a047
RS
8878 /* COMPLEX type for Extended Pascal & Fortran */
8879 case COMPLEX_EXPR:
8880 {
8881 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8882 rtx insns;
7308a047
RS
8883
8884 /* Get the rtx code of the operands. */
8885 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8886 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8887
8888 if (! target)
8889 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8890
6551fa4d 8891 start_sequence ();
7308a047
RS
8892
8893 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8894 emit_move_insn (gen_realpart (mode, target), op0);
8895 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8896
6551fa4d
JW
8897 insns = get_insns ();
8898 end_sequence ();
8899
7308a047 8900 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8901 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8902 each with a separate pseudo as destination.
8903 It's not correct for flow to treat them as a unit. */
6d6e61ce 8904 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8905 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8906 else
2f937369 8907 emit_insn (insns);
7308a047
RS
8908
8909 return target;
8910 }
8911
8912 case REALPART_EXPR:
2d7050fd
RS
8913 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8914 return gen_realpart (mode, op0);
3a94c984 8915
7308a047 8916 case IMAGPART_EXPR:
2d7050fd
RS
8917 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8918 return gen_imagpart (mode, op0);
7308a047
RS
8919
8920 case CONJ_EXPR:
8921 {
62acb978 8922 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8923 rtx imag_t;
6551fa4d 8924 rtx insns;
3a94c984
KH
8925
8926 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8927
8928 if (! target)
d6a5ac33 8929 target = gen_reg_rtx (mode);
3a94c984 8930
6551fa4d 8931 start_sequence ();
7308a047
RS
8932
8933 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8934 emit_move_insn (gen_realpart (partmode, target),
8935 gen_realpart (partmode, op0));
7308a047 8936
62acb978 8937 imag_t = gen_imagpart (partmode, target);
91ce572a 8938 temp = expand_unop (partmode,
0fb7aeda
KH
8939 ! unsignedp && flag_trapv
8940 && (GET_MODE_CLASS(partmode) == MODE_INT)
8941 ? negv_optab : neg_optab,
3a94c984 8942 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8943 if (temp != imag_t)
8944 emit_move_insn (imag_t, temp);
8945
6551fa4d
JW
8946 insns = get_insns ();
8947 end_sequence ();
8948
3a94c984 8949 /* Conjugate should appear as a single unit.
d6a5ac33 8950 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8951 each with a separate pseudo as destination.
8952 It's not correct for flow to treat them as a unit. */
6d6e61ce 8953 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8954 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8955 else
2f937369 8956 emit_insn (insns);
7308a047
RS
8957
8958 return target;
8959 }
8960
e976b8b2
MS
8961 case TRY_CATCH_EXPR:
8962 {
8963 tree handler = TREE_OPERAND (exp, 1);
8964
8965 expand_eh_region_start ();
8966
8967 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8968
52a11cbf 8969 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
8970
8971 return op0;
8972 }
8973
b335b813
PB
8974 case TRY_FINALLY_EXPR:
8975 {
8976 tree try_block = TREE_OPERAND (exp, 0);
8977 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 8978
8ad8135a 8979 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
8980 {
8981 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8982 is not sufficient, so we cannot expand the block twice.
8983 So we play games with GOTO_SUBROUTINE_EXPR to let us
8984 expand the thing only once. */
8ad8135a
RH
8985 /* When not optimizing, we go ahead with this form since
8986 (1) user breakpoints operate more predictably without
8987 code duplication, and
8988 (2) we're not running any of the global optimizers
8989 that would explode in time/space with the highly
8990 connected CFG created by the indirect branching. */
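 /* Rough shape of the single-copy expansion below (sketch only, not
 part of the original comment):
 <try block, with a cleanup that sets return_link and jumps to
 finally_label>
 goto done_label;
 finally_label: <finally block>; jump back through return_link;
 done_label:
 The cleanup is the GOTO_SUBROUTINE_EXPR built just below. */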
8943a0b4
RH
8991
8992 rtx finally_label = gen_label_rtx ();
8993 rtx done_label = gen_label_rtx ();
8994 rtx return_link = gen_reg_rtx (Pmode);
8995 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8996 (tree) finally_label, (tree) return_link);
8997 TREE_SIDE_EFFECTS (cleanup) = 1;
8998
8999 /* Start a new binding layer that will keep track of all cleanup
9000 actions to be performed. */
9001 expand_start_bindings (2);
9002 target_temp_slot_level = temp_slot_level;
9003
9004 expand_decl_cleanup (NULL_TREE, cleanup);
9005 op0 = expand_expr (try_block, target, tmode, modifier);
9006
9007 preserve_temp_slots (op0);
9008 expand_end_bindings (NULL_TREE, 0, 0);
9009 emit_jump (done_label);
9010 emit_label (finally_label);
9011 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9012 emit_indirect_jump (return_link);
9013 emit_label (done_label);
9014 }
9015 else
9016 {
9017 expand_start_bindings (2);
9018 target_temp_slot_level = temp_slot_level;
b335b813 9019
8943a0b4
RH
9020 expand_decl_cleanup (NULL_TREE, finally_block);
9021 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9022
8943a0b4
RH
9023 preserve_temp_slots (op0);
9024 expand_end_bindings (NULL_TREE, 0, 0);
9025 }
b335b813 9026
b335b813
PB
9027 return op0;
9028 }
9029
3a94c984 9030 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9031 {
9032 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9033 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9034 rtx return_address = gen_label_rtx ();
3a94c984
KH
9035 emit_move_insn (return_link,
9036 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9037 emit_jump (subr);
9038 emit_label (return_address);
9039 return const0_rtx;
9040 }
9041
d3707adb
RH
9042 case VA_ARG_EXPR:
9043 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9044
52a11cbf 9045 case EXC_PTR_EXPR:
86c99549 9046 return get_exception_pointer (cfun);
52a11cbf 9047
67231816
RH
9048 case FDESC_EXPR:
9049 /* Function descriptors are not valid except for as
9050 initialization constants, and should not be expanded. */
9051 abort ();
9052
bbf6f052 9053 default:
0fab64a3
MM
9054 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9055 alt_rtl);
bbf6f052
RK
9056 }
9057
9058 /* Here to do an ordinary binary operator, generating an instruction
9059 from the optab already placed in `this_optab'. */
9060 binop:
eb698c58
RS
9061 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9062 subtarget, &op0, &op1, 0);
bbf6f052 9063 binop2:
8403445a
AM
9064 if (modifier == EXPAND_STACK_PARM)
9065 target = 0;
bbf6f052
RK
9066 temp = expand_binop (mode, this_optab, op0, op1, target,
9067 unsignedp, OPTAB_LIB_WIDEN);
9068 if (temp == 0)
9069 abort ();
9070 return temp;
9071}
b93a436e 9072\f
1ce7f3c2
RK
9073/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9074 when applied to the address of EXP produces an address known to be
9075 aligned more than BIGGEST_ALIGNMENT. */
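/* Illustrative summary (not part of the original comment): the pattern
 recognized below is, in essence,
 OFFSET == (-&EXP) & (ALIGN - 1)
 with ALIGN a power of 2 larger than BIGGEST_ALIGNMENT, i.e. the amount
 that rounds the address of EXP up to an ALIGN boundary. */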
9076
9077static int
502b8322 9078is_aligning_offset (tree offset, tree exp)
1ce7f3c2
RK
9079{
9080 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9081 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9082 || TREE_CODE (offset) == NOP_EXPR
9083 || TREE_CODE (offset) == CONVERT_EXPR
9084 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9085 offset = TREE_OPERAND (offset, 0);
9086
9087 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9088 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9089 if (TREE_CODE (offset) != BIT_AND_EXPR
9090 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9091 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9092 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9093 return 0;
9094
9095 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9096 It must be NEGATE_EXPR. Then strip any more conversions. */
9097 offset = TREE_OPERAND (offset, 0);
9098 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9099 || TREE_CODE (offset) == NOP_EXPR
9100 || TREE_CODE (offset) == CONVERT_EXPR)
9101 offset = TREE_OPERAND (offset, 0);
9102
9103 if (TREE_CODE (offset) != NEGATE_EXPR)
9104 return 0;
9105
9106 offset = TREE_OPERAND (offset, 0);
9107 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9108 || TREE_CODE (offset) == NOP_EXPR
9109 || TREE_CODE (offset) == CONVERT_EXPR)
9110 offset = TREE_OPERAND (offset, 0);
9111
9112 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9113 whose type is the same as EXP. */
9114 return (TREE_CODE (offset) == ADDR_EXPR
9115 && (TREE_OPERAND (offset, 0) == exp
9116 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9117 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9118 == TREE_TYPE (exp)))));
9119}
9120\f
e0a2f705 9121/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9122 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9123 in bytes within the string that ARG is accessing. The type of the
9124 offset will be `sizetype'. */
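/* Illustrative example (not part of the original comment): for
 ARG = (char *) "hello" + 2, this returns the STRING_CST "hello" and
 sets *PTR_OFFSET to the sizetype constant 2. */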
b93a436e 9125
28f4ec01 9126tree
502b8322 9127string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9128{
9129 STRIP_NOPS (arg);
9130
9131 if (TREE_CODE (arg) == ADDR_EXPR
9132 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9133 {
fed3cef0 9134 *ptr_offset = size_zero_node;
b93a436e
JL
9135 return TREE_OPERAND (arg, 0);
9136 }
9137 else if (TREE_CODE (arg) == PLUS_EXPR)
9138 {
9139 tree arg0 = TREE_OPERAND (arg, 0);
9140 tree arg1 = TREE_OPERAND (arg, 1);
9141
9142 STRIP_NOPS (arg0);
9143 STRIP_NOPS (arg1);
9144
9145 if (TREE_CODE (arg0) == ADDR_EXPR
9146 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9147 {
fed3cef0 9148 *ptr_offset = convert (sizetype, arg1);
b93a436e 9149 return TREE_OPERAND (arg0, 0);
bbf6f052 9150 }
b93a436e
JL
9151 else if (TREE_CODE (arg1) == ADDR_EXPR
9152 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9153 {
fed3cef0 9154 *ptr_offset = convert (sizetype, arg0);
b93a436e 9155 return TREE_OPERAND (arg1, 0);
bbf6f052 9156 }
b93a436e 9157 }
ca695ac9 9158
b93a436e
JL
9159 return 0;
9160}
ca695ac9 9161\f
b93a436e
JL
9162/* Expand code for a post- or pre- increment or decrement
9163 and return the RTX for the result.
9164 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
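/* For example, "i++" is expanded with POST == 1 and "--i" with
 POST == 0. (Illustrative note, not part of the original comment.) */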
1499e0a8 9165
b93a436e 9166static rtx
502b8322 9167expand_increment (tree exp, int post, int ignore)
ca695ac9 9168{
b3694847
SS
9169 rtx op0, op1;
9170 rtx temp, value;
9171 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9172 optab this_optab = add_optab;
9173 int icode;
9174 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9175 int op0_is_copy = 0;
9176 int single_insn = 0;
9177 /* 1 means we can't store into OP0 directly,
9178 because it is a subreg narrower than a word,
9179 and we don't dare clobber the rest of the word. */
9180 int bad_subreg = 0;
1499e0a8 9181
9182 /* Stabilize any component ref that might need to be
9183 evaluated more than once below. */
9184 if (!post
9185 || TREE_CODE (incremented) == BIT_FIELD_REF
9186 || (TREE_CODE (incremented) == COMPONENT_REF
9187 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9188 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9189 incremented = stabilize_reference (incremented);
9190 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9191 ones into save exprs so that they don't accidentally get evaluated
9192 more than once by the code below. */
9193 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9194 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9195 incremented = save_expr (incremented);
e9a25f70 9196
9197 /* Compute the operands as RTX.
9198 Note whether OP0 is the actual lvalue or a copy of it:
9199 I believe it is a copy iff it is a register or subreg
6d2f8887 9200 and insns were generated in computing it. */
e9a25f70 9201
b93a436e 9202 temp = get_last_insn ();
37a08a29 9203 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9204
9205 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9206 in place but instead must do sign- or zero-extension during assignment,
9207 so we copy it into a new register and let the code below use it as
9208 a copy.
e9a25f70 9209
9210 Note that we can safely modify this SUBREG since it is known not to be
9211 shared (it was made by the expand_expr call above). */
9212
9213 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9214 {
9215 if (post)
9216 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9217 else
9218 bad_subreg = 1;
9219 }
9220 else if (GET_CODE (op0) == SUBREG
9221 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9222 {
9223 /* We cannot increment this SUBREG in place. If we are
9224 post-incrementing, get a copy of the old value. Otherwise,
9225 just mark that we cannot increment in place. */
9226 if (post)
9227 op0 = copy_to_reg (op0);
9228 else
9229 bad_subreg = 1;
9230 }
9231
9232 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9233 && temp != get_last_insn ());
37a08a29 9234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9235
9236 /* Decide whether incrementing or decrementing. */
9237 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9238 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9239 this_optab = sub_optab;
9240
9241 /* Convert decrement by a constant into a negative increment. */
9242 if (this_optab == sub_optab
9243 && GET_CODE (op1) == CONST_INT)
ca695ac9 9244 {
3a94c984 9245 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9246 this_optab = add_optab;
ca695ac9 9247 }
1499e0a8 9248
91ce572a 9249 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9250 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9251
9252 /* For a preincrement, see if we can do this with a single instruction. */
9253 if (!post)
9254 {
9255 icode = (int) this_optab->handlers[(int) mode].insn_code;
9256 if (icode != (int) CODE_FOR_nothing
9257 /* Make sure that OP0 is valid for operands 0 and 1
9258 of the insn we want to queue. */
9259 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9260 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9261 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9262 single_insn = 1;
9263 }
bbf6f052 9264
9265 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9266 then we cannot just increment OP0. We must therefore contrive to
9267 increment the original value. Then, for postincrement, we can return
9268 OP0 since it is a copy of the old value. For preincrement, expand here
9269 unless we can do it with a single insn.
bbf6f052 9270
9271 Likewise if storing directly into OP0 would clobber high bits
9272 we need to preserve (bad_subreg). */
9273 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9274 {
9275 /* This is the easiest way to increment the value wherever it is.
9276 Problems with multiple evaluation of INCREMENTED are prevented
9277 because either (1) it is a component_ref or preincrement,
9278 in which case it was stabilized above, or (2) it is an array_ref
9279 with constant index in an array in a register, which is
9280 safe to reevaluate. */
9281 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9282 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9283 ? MINUS_EXPR : PLUS_EXPR),
9284 TREE_TYPE (exp),
9285 incremented,
9286 TREE_OPERAND (exp, 1));
a358cee0 9287
9288 while (TREE_CODE (incremented) == NOP_EXPR
9289 || TREE_CODE (incremented) == CONVERT_EXPR)
9290 {
9291 newexp = convert (TREE_TYPE (incremented), newexp);
9292 incremented = TREE_OPERAND (incremented, 0);
9293 }
bbf6f052 9294
b90f141a 9295 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9296 return post ? op0 : temp;
9297 }
bbf6f052 9298
9299 if (post)
9300 {
9301 /* We have a true reference to the value in OP0.
9302 If there is an insn to add or subtract in this mode, queue it.
d91edf86 9303 Queuing the increment insn avoids the register shuffling
9304 that often results if we must increment now and first save
9305 the old value for subsequent use. */
bbf6f052 9306
9307#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9308 op0 = stabilize (op0);
9309#endif
41dfd40c 9310
9311 icode = (int) this_optab->handlers[(int) mode].insn_code;
9312 if (icode != (int) CODE_FOR_nothing
9313 /* Make sure that OP0 is valid for operands 0 and 1
9314 of the insn we want to queue. */
9315 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9316 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9317 {
a995e389 9318 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9319 op1 = force_reg (mode, op1);
bbf6f052 9320
9321 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9322 }
9323 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9324 {
9325 rtx addr = (general_operand (XEXP (op0, 0), mode)
9326 ? force_reg (Pmode, XEXP (op0, 0))
9327 : copy_to_reg (XEXP (op0, 0)));
9328 rtx temp, result;
ca695ac9 9329
792760b9 9330 op0 = replace_equiv_address (op0, addr);
b93a436e 9331 temp = force_reg (GET_MODE (op0), op0);
a995e389 9332 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9333 op1 = force_reg (mode, op1);
ca695ac9 9334
9335 /* The increment queue is LIFO, thus we have to `queue'
9336 the instructions in reverse order. */
9337 enqueue_insn (op0, gen_move_insn (op0, temp));
9338 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9339 return result;
9340 }
9341 }
ca695ac9 9342
9343 /* Preincrement, or we can't increment with one simple insn. */
9344 if (post)
9345 /* Save a copy of the value before inc or dec, to return it later. */
9346 temp = value = copy_to_reg (op0);
9347 else
9348 /* Arrange to return the incremented value. */
9349 /* Copy the rtx because expand_binop will protect from the queue,
9350 and the results of that would be invalid for us to return
9351 if our caller does emit_queue before using our result. */
9352 temp = copy_rtx (value = op0);
bbf6f052 9353
b93a436e 9354 /* Increment however we can. */
37a08a29 9355 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9356 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9357
9358 /* Make sure the value is stored into OP0. */
9359 if (op1 != op0)
9360 emit_move_insn (op0, op1);
5718612f 9361
9362 return temp;
9363}
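/* For illustration: when OP0 is only a copy of the lvalue, or a narrow
   SUBREG that must not be clobbered, the fallback near the top of the
   function rebuilds the increment as an assignment tree, so a
   post-increment such as `b.f++' on a bit-field is expanded roughly as

       temp = b.f;
       b.f = b.f + 1;

   and `temp' (a made-up name for the copy held in OP0) is what the caller
   receives as the value of the expression.  */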
9364\f
9365/* Generate code to calculate EXP using a store-flag instruction
9366 and return an rtx for the result. EXP is either a comparison
9367 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9368
b93a436e 9369 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9370
cc2902df 9371 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9372 cheap.
ca695ac9 9373
9374 Return zero if there is no suitable set-flag instruction
9375 available on this machine.
ca695ac9 9376
9377 Once expand_expr has been called on the arguments of the comparison,
9378 we are committed to doing the store flag, since it is not safe to
9379 re-evaluate the expression. We emit the store-flag insn by calling
9380 emit_store_flag, but only expand the arguments if we have a reason
9381 to believe that emit_store_flag will be successful. If we think that
9382 it will, but it isn't, we have to simulate the store-flag with a
9383 set/jump/set sequence. */
ca695ac9 9384
b93a436e 9385static rtx
502b8322 9386do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9387{
9388 enum rtx_code code;
9389 tree arg0, arg1, type;
9390 tree tem;
9391 enum machine_mode operand_mode;
9392 int invert = 0;
9393 int unsignedp;
9394 rtx op0, op1;
9395 enum insn_code icode;
9396 rtx subtarget = target;
381127e8 9397 rtx result, label;
ca695ac9 9398
9399 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9400 result at the end. We can't simply invert the test since it would
9401 have already been inverted if it were valid. This case occurs for
9402 some floating-point comparisons. */
ca695ac9 9403
9404 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9405 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9406
9407 arg0 = TREE_OPERAND (exp, 0);
9408 arg1 = TREE_OPERAND (exp, 1);
9409
9410 /* Don't crash if the comparison was erroneous. */
9411 if (arg0 == error_mark_node || arg1 == error_mark_node)
9412 return const0_rtx;
9413
9414 type = TREE_TYPE (arg0);
9415 operand_mode = TYPE_MODE (type);
9416 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9417
9418 /* We won't bother with BLKmode store-flag operations because it would mean
9419 passing a lot of information to emit_store_flag. */
9420 if (operand_mode == BLKmode)
9421 return 0;
ca695ac9 9422
9423 /* We won't bother with store-flag operations involving function pointers
9424 when function pointers must be canonicalized before comparisons. */
9425#ifdef HAVE_canonicalize_funcptr_for_compare
9426 if (HAVE_canonicalize_funcptr_for_compare
9427 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9428 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9429 == FUNCTION_TYPE))
9430 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9431 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9432 == FUNCTION_TYPE))))
9433 return 0;
9434#endif
9435
9436 STRIP_NOPS (arg0);
9437 STRIP_NOPS (arg1);
ca695ac9 9438
9439 /* Get the rtx comparison code to use. We know that EXP is a comparison
9440 operation of some type. Some comparisons against 1 and -1 can be
9441 converted to comparisons with zero. Do so here so that the tests
9442 below will be aware that we have a comparison with zero. These
9443 tests will not catch constants in the first operand, but constants
9444 are rarely passed as the first operand. */
ca695ac9 9445
9446 switch (TREE_CODE (exp))
9447 {
9448 case EQ_EXPR:
9449 code = EQ;
bbf6f052 9450 break;
9451 case NE_EXPR:
9452 code = NE;
bbf6f052 9453 break;
9454 case LT_EXPR:
9455 if (integer_onep (arg1))
9456 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9457 else
9458 code = unsignedp ? LTU : LT;
ca695ac9 9459 break;
9460 case LE_EXPR:
9461 if (! unsignedp && integer_all_onesp (arg1))
9462 arg1 = integer_zero_node, code = LT;
9463 else
9464 code = unsignedp ? LEU : LE;
ca695ac9 9465 break;
9466 case GT_EXPR:
9467 if (! unsignedp && integer_all_onesp (arg1))
9468 arg1 = integer_zero_node, code = GE;
9469 else
9470 code = unsignedp ? GTU : GT;
9471 break;
9472 case GE_EXPR:
9473 if (integer_onep (arg1))
9474 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9475 else
9476 code = unsignedp ? GEU : GE;
ca695ac9 9477 break;
9478
9479 case UNORDERED_EXPR:
9480 code = UNORDERED;
9481 break;
9482 case ORDERED_EXPR:
9483 code = ORDERED;
9484 break;
9485 case UNLT_EXPR:
9486 code = UNLT;
9487 break;
9488 case UNLE_EXPR:
9489 code = UNLE;
9490 break;
9491 case UNGT_EXPR:
9492 code = UNGT;
9493 break;
9494 case UNGE_EXPR:
9495 code = UNGE;
9496 break;
9497 case UNEQ_EXPR:
9498 code = UNEQ;
9499 break;
1eb8759b 9500
ca695ac9 9501 default:
b93a436e 9502 abort ();
bbf6f052 9503 }
bbf6f052 9504
9505 /* Put a constant second. */
9506 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9507 {
9508 tem = arg0; arg0 = arg1; arg1 = tem;
9509 code = swap_condition (code);
ca695ac9 9510 }
bbf6f052 9511
9512 /* If this is an equality or inequality test of a single bit, we can
9513 do this by shifting the bit being tested to the low-order bit and
9514 masking the result with the constant 1. If the condition was EQ,
9515 we xor it with 1. This does not require an scc insn and is faster
9516 than an scc insn even if we have it.
9517
9518 The code to make this transformation was moved into fold_single_bit_test,
9519 so we just call into the folder and expand its result. */
d39985fa 9520
9521 if ((code == NE || code == EQ)
9522 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9523 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9524 {
9525 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9526 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9527 arg0, arg1, type),
9528 target, VOIDmode, EXPAND_NORMAL);
9529 }
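  /* For illustration: a test such as (x & 4) != 0 is folded this way to
     ((x >> 2) & 1), and (x & 4) == 0 to (((x >> 2) & 1) ^ 1), so no scc
     instruction is needed; `x' is just a placeholder for the example.  */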
bbf6f052 9530
b93a436e 9531 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9532 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9533 return 0;
1eb8759b 9534
9535 icode = setcc_gen_code[(int) code];
9536 if (icode == CODE_FOR_nothing
a995e389 9537 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9538 {
9539 /* We can only do this if it is one of the special cases that
9540 can be handled without an scc insn. */
9541 if ((code == LT && integer_zerop (arg1))
9542 || (! only_cheap && code == GE && integer_zerop (arg1)))
9543 ;
9544 else if (BRANCH_COST >= 0
9545 && ! only_cheap && (code == NE || code == EQ)
9546 && TREE_CODE (type) != REAL_TYPE
9547 && ((abs_optab->handlers[(int) operand_mode].insn_code
9548 != CODE_FOR_nothing)
9549 || (ffs_optab->handlers[(int) operand_mode].insn_code
9550 != CODE_FOR_nothing)))
9551 ;
9552 else
9553 return 0;
ca695ac9 9554 }
3a94c984 9555
296b4ed9 9556 if (! get_subtarget (target)
e3be1116 9557 || GET_MODE (subtarget) != operand_mode)
9558 subtarget = 0;
9559
eb698c58 9560 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9561
9562 if (target == 0)
9563 target = gen_reg_rtx (mode);
9564
9565 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9566 because, if the emit_store_flag does anything it will succeed and
9567 OP0 and OP1 will not be used subsequently. */
ca695ac9 9568
9569 result = emit_store_flag (target, code,
9570 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9571 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9572 operand_mode, unsignedp, 1);
ca695ac9 9573
9574 if (result)
9575 {
9576 if (invert)
9577 result = expand_binop (mode, xor_optab, result, const1_rtx,
9578 result, 0, OPTAB_LIB_WIDEN);
9579 return result;
ca695ac9 9580 }
bbf6f052 9581
9582 /* If this failed, we have to do this with set/compare/jump/set code. */
9583 if (GET_CODE (target) != REG
9584 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9585 target = gen_reg_rtx (GET_MODE (target));
9586
9587 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9588 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9589 operand_mode, NULL_RTX);
9590 if (GET_CODE (result) == CONST_INT)
9591 return (((result == const0_rtx && ! invert)
9592 || (result != const0_rtx && invert))
9593 ? const0_rtx : const1_rtx);
ca695ac9 9594
9595 /* The code of RESULT may not match CODE if compare_from_rtx
9596 decided to swap its operands and reverse the original code.
9597
9598 We know that compare_from_rtx returns either a CONST_INT or
9599 a new comparison code, so it is safe to just extract the
9600 code from RESULT. */
9601 code = GET_CODE (result);
9602
9603 label = gen_label_rtx ();
9604 if (bcc_gen_fctn[(int) code] == 0)
9605 abort ();
0f41302f 9606
9607 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9608 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9609 emit_label (label);
bbf6f052 9610
b93a436e 9611 return target;
ca695ac9 9612}
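/* For illustration: when emit_store_flag cannot be used, the tail of the
   function above emits the equivalent of

       target = 1;
       if (op0 <cond> op1) goto done;
       target = 0;
     done:

   with the two constants swapped when INVERT is set; <cond> stands for
   whichever comparison CODE was selected.  */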
b93a436e 9613\f
b93a436e 9614
9615/* Stubs in case we haven't got a casesi insn. */
9616#ifndef HAVE_casesi
9617# define HAVE_casesi 0
9618# define gen_casesi(a, b, c, d, e) (0)
9619# define CODE_FOR_casesi CODE_FOR_nothing
9620#endif
9621
9622/* If the machine does not have a case insn that compares the bounds,
9623 this means extra overhead for dispatch tables, which raises the
9624 threshold for using them. */
9625#ifndef CASE_VALUES_THRESHOLD
9626#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9627#endif /* CASE_VALUES_THRESHOLD */
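/* For illustration: with the default above, a target that provides a
   casesi pattern is willing to use a dispatch table once a switch has
   roughly 4 or more case values, and a target without casesi once it has
   roughly 5 or more; a port may override this by defining
   CASE_VALUES_THRESHOLD itself.  */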
9628
9629unsigned int
502b8322 9630case_values_threshold (void)
9631{
9632 return CASE_VALUES_THRESHOLD;
9633}
9634
9635/* Attempt to generate a casesi instruction. Returns 1 if successful,
9636 0 otherwise (i.e. if there is no casesi instruction). */
9637int
9638try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9639 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9640{
9641 enum machine_mode index_mode = SImode;
9642 int index_bits = GET_MODE_BITSIZE (index_mode);
9643 rtx op1, op2, index;
9644 enum machine_mode op_mode;
9645
9646 if (! HAVE_casesi)
9647 return 0;
9648
9649 /* Convert the index to SImode. */
9650 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9651 {
9652 enum machine_mode omode = TYPE_MODE (index_type);
9653 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9654
9655 /* We must handle the endpoints in the original mode. */
9656 index_expr = build (MINUS_EXPR, index_type,
9657 index_expr, minval);
9658 minval = integer_zero_node;
9659 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9660 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9661 omode, 1, default_label);
9662 /* Now we can safely truncate. */
9663 index = convert_to_mode (index_mode, index, 0);
9664 }
9665 else
9666 {
9667 if (TYPE_MODE (index_type) != index_mode)
9668 {
9669 index_expr = convert ((*lang_hooks.types.type_for_size)
9670 (index_bits, 0), index_expr);
9671 index_type = TREE_TYPE (index_expr);
9672 }
9673
9674 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9675 }
9676 emit_queue ();
9677 index = protect_from_queue (index, 0);
9678 do_pending_stack_adjust ();
9679
9680 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9681 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9682 (index, op_mode))
9683 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9684
9685 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9686
9687 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9688 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9689 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9690 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9691 (op1, op_mode))
9692 op1 = copy_to_mode_reg (op_mode, op1);
9693
9694 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9695
9696 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9697 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9698 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9699 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9700 (op2, op_mode))
9701 op2 = copy_to_mode_reg (op_mode, op2);
9702
9703 emit_jump_insn (gen_casesi (index, op1, op2,
9704 table_label, default_label));
9705 return 1;
9706}
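/* For illustration: for a switch whose case labels span 5 ... 20, the
   caller (the switch-statement expander, for example) passes minval == 5
   and range == 15.  The index is rewritten as index_expr - 5, so a single
   unsigned comparison against 15 (done above for wide indices, or by the
   casesi pattern itself) routes every out-of-range value to
   DEFAULT_LABEL.  */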
9707
9708/* Attempt to generate a tablejump instruction; same concept. */
9709#ifndef HAVE_tablejump
9710#define HAVE_tablejump 0
9711#define gen_tablejump(x, y) (0)
9712#endif
9713
9714/* Subroutine of the next function.
9715
9716 INDEX is the value being switched on, with the lowest value
9717 in the table already subtracted.
9718 MODE is its expected mode (needed if INDEX is constant).
9719 RANGE is the length of the jump table.
9720 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9721
9722 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9723 index value is out of range. */
0f41302f 9724
ad82abb8 9725static void
9726do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9727 rtx default_label)
ca695ac9 9728{
b3694847 9729 rtx temp, vector;
88d3b7f0 9730
9731 if (INTVAL (range) > cfun->max_jumptable_ents)
9732 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9733
9734 /* Do an unsigned comparison (in the proper mode) between the index
9735 expression and the value which represents the length of the range.
9736 Since we just finished subtracting the lower bound of the range
9737 from the index expression, this comparison allows us to simultaneously
9738 check that the original index expression value is both greater than
9739 or equal to the minimum value of the range and less than or equal to
9740 the maximum value of the range. */
709f5be1 9741
c5d5d461 9742 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9743 default_label);
bbf6f052 9744
9745 /* If index is in range, it must fit in Pmode.
9746 Convert to Pmode so we can index with it. */
9747 if (mode != Pmode)
9748 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9749
ba228239 9750 /* Don't let a MEM slip through, because then INDEX that comes
9751 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9752 and break_out_memory_refs will go to work on it and mess it up. */
9753#ifdef PIC_CASE_VECTOR_ADDRESS
9754 if (flag_pic && GET_CODE (index) != REG)
9755 index = copy_to_mode_reg (Pmode, index);
9756#endif
ca695ac9 9757
9758 /* If flag_force_addr were to affect this address
9759 it could interfere with the tricky assumptions made
9760 about addresses that contain label-refs,
9761 which may be valid only very near the tablejump itself. */
9762 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9763 GET_MODE_SIZE, because this indicates how large insns are. The other
9764 uses should all be Pmode, because they are addresses. This code
9765 could fail if addresses and insns are not the same size. */
9766 index = gen_rtx_PLUS (Pmode,
9767 gen_rtx_MULT (Pmode, index,
9768 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9769 gen_rtx_LABEL_REF (Pmode, table_label));
9770#ifdef PIC_CASE_VECTOR_ADDRESS
9771 if (flag_pic)
9772 index = PIC_CASE_VECTOR_ADDRESS (index);
9773 else
bbf6f052 9774#endif
9775 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9776 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9777 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9778 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 9779 MEM_NOTRAP_P (vector) = 1;
9780 convert_move (temp, vector, 0);
9781
9782 emit_jump_insn (gen_tablejump (temp, table_label));
9783
9784 /* If we are generating PIC code or if the table is PC-relative, the
9785 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9786 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9787 emit_barrier ();
bbf6f052 9788}
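/* For illustration: with CASE_VECTOR_MODE == SImode (4-byte table entries)
   and a zero-based INDEX of 3, the address formed above is

       table_label + 3 * 4

   i.e. the fourth entry of the dispatch table; that entry is loaded into
   TEMP (after any PIC adjustment) and used as the target of the
   tablejump.  */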
b93a436e 9789
ad82abb8 9790int
9791try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9792 rtx table_label, rtx default_label)
9793{
9794 rtx index;
9795
9796 if (! HAVE_tablejump)
9797 return 0;
9798
9799 index_expr = fold (build (MINUS_EXPR, index_type,
9800 convert (index_type, index_expr),
9801 convert (index_type, minval)));
9802 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9803 emit_queue ();
9804 index = protect_from_queue (index, 0);
9805 do_pending_stack_adjust ();
9806
9807 do_tablejump (index, TYPE_MODE (index_type),
9808 convert_modes (TYPE_MODE (index_type),
9809 TYPE_MODE (TREE_TYPE (range)),
9810 expand_expr (range, NULL_RTX,
9811 VOIDmode, 0),
9812 TREE_UNSIGNED (TREE_TYPE (range))),
9813 table_label, default_label);
9814 return 1;
9815}
e2500fed 9816
9817/* Nonzero if the mode is a valid vector mode for this architecture.
9818 This returns nonzero even if there is no hardware support for the
9819 vector mode, but we can emulate with narrower modes. */
9820
9821int
502b8322 9822vector_mode_valid_p (enum machine_mode mode)
9823{
9824 enum mode_class class = GET_MODE_CLASS (mode);
9825 enum machine_mode innermode;
9826
9827 /* Doh! What's going on? */
9828 if (class != MODE_VECTOR_INT
9829 && class != MODE_VECTOR_FLOAT)
9830 return 0;
9831
9832 /* Hardware support. Woo hoo! */
9833 if (VECTOR_MODE_SUPPORTED_P (mode))
9834 return 1;
9835
9836 innermode = GET_MODE_INNER (mode);
9837
9838 /* We should probably return 1 if requesting V4DI and we have no DI,
9839 but do have V2DI, though that case is probably very unlikely. */
9840
9841 /* If we have support for the inner mode, we can safely emulate it.
9842 We may not have V2DI, but we can emulate with a pair of DIs. */
9843 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9844}
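/* For illustration: on a target with no V2DImode move pattern but with a
   DImode move pattern, vector_mode_valid_p (V2DImode) returns nonzero,
   since such a value can be emulated as a pair of DIs; if the inner mode
   has no move pattern either, the result is 0.  */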
9845
9846/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9847static rtx
502b8322 9848const_vector_from_tree (tree exp)
9849{
9850 rtvec v;
9851 int units, i;
9852 tree link, elt;
9853 enum machine_mode inner, mode;
9854
9855 mode = TYPE_MODE (TREE_TYPE (exp));
9856
9857 if (is_zeros_p (exp))
9858 return CONST0_RTX (mode);
9859
9860 units = GET_MODE_NUNITS (mode);
9861 inner = GET_MODE_INNER (mode);
9862
9863 v = rtvec_alloc (units);
9864
9865 link = TREE_VECTOR_CST_ELTS (exp);
9866 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9867 {
9868 elt = TREE_VALUE (link);
9869
9870 if (TREE_CODE (elt) == REAL_CST)
9871 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9872 inner);
9873 else
9874 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9875 TREE_INT_CST_HIGH (elt),
9876 inner);
9877 }
9878
9879 /* Initialize remaining elements to 0. */
9880 for (; i < units; ++i)
9881 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9882
9883 return gen_rtx_raw_CONST_VECTOR (mode, v);
9884}
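/* For illustration: a V4SImode VECTOR_CST that lists only the elements
   {1, 2, 3} produces three CONST_INT entries from the loop above, the
   final slot is padded with CONST0_RTX (SImode), and the result is a
   CONST_VECTOR whose four elements are 1, 2, 3 and 0.  */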
9885
e2500fed 9886#include "gt-expr.h"